diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index de32cee71dfc97..4dca7118c11348 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -65,7 +65,7 @@ jobs: - name: Upload Artifacts if: ${{ steps.build.outputs.channel != '' || steps.build.outputs.tag != '' }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: windows-artifact path: windows-release/ @@ -76,7 +76,7 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Download - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: windows-artifact path: ./windows-release @@ -96,7 +96,7 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Download - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: windows-artifact path: ./windows-release/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 66aadb9b702c2c..8c809f2b78b115 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,7 @@ Release channels have their own copy of this changelog: * `agave-validator`: Update PoH speed check to compare against current hash rate from a Bank (#2447) * `solana-test-validator`: Add `--clone-feature-set` flag to mimic features from a target cluster (#2480) * `solana-genesis`: the `--cluster-type` parameter now clones the feature set from the target cluster (#2587) + * `unified-scheduler` as default option for `--block-verification-method` (#2653) ## [2.0.0] * Breaking diff --git a/Cargo.lock b/Cargo.lock index cd85ad9bc60a00..90d37815cebba9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -73,6 +73,7 @@ dependencies = [ "memmap2", "rayon", "solana-accounts-db", + "solana-clap-utils", "solana-program", "solana-version", ] @@ -585,9 +586,9 @@ checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" [[package]] name = "arrayvec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "ascii" @@ -723,7 +724,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -876,7 +877,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -1033,7 +1034,7 @@ dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", "syn_derive", ] @@ -1150,22 +1151,22 @@ checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" [[package]] name = "bytemuck" -version = "1.16.3" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "102087e286b4677862ea56cf8fc58bb2cdfa8725c40ffb80fe3a008eb7f2fc83" +checksum = "6fd4c6dcc3b0aea2f5c0b4b82c2b15fe39ddbc76041a310848f4706edf76bb31" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.7.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee891b04274a59bd38b412188e24b849617b2e45a0fd8d057deb63e7403761b" +checksum = "0cc8b54b395f2fcfbb3d90c47b01c7f444d94d05bdeb775811dec868ac3bbc26" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -1776,7 +1777,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ 
-1787,7 +1788,7 @@ checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a" dependencies = [ "darling_core", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -1849,7 +1850,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -1973,7 +1974,7 @@ checksum = "a6cbae11b3de8fce2a456e8ea3dada226b35fe791f0dc1d360c0941f0bb681f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -2079,7 +2080,7 @@ checksum = "03cdc46ec28bd728e67540c528013c6a10eb69a02eb31078a1bda695438cbfb8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -2349,7 +2350,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -3217,9 +3218,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.155" +version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" [[package]] name = "libloading" @@ -3675,7 +3676,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -3748,7 +3749,7 @@ dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -4373,7 +4374,7 @@ checksum = "9e2e25ee72f5b24d773cae88422baddefff7714f97aab68d96fe2b6fc4a28fb2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -5073,9 +5074,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.207" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5665e14a49a4ea1b91029ba7d3bca9f299e1f7cfa194388ccc20f14743e784f2" +checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" dependencies = [ "serde_derive", ] @@ -5091,20 +5092,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.207" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aea2634c86b0e8ef2cfdc0c340baede54ec27b1e46febd7f80dffb2aa44a00e" +checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] name = "serde_json" -version = "1.0.124" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d" +checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed" dependencies = [ "itoa", "memchr", @@ -5152,7 +5153,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -5202,7 +5203,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -6425,7 +6426,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.0", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -6707,6 +6708,7 @@ dependencies = [ "solana-vote", "solana-vote-program", "static_assertions", + "strum", "tempfile", "trees", ] @@ 
-6850,7 +6852,7 @@ version = "2.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", "toml 0.8.12", ] @@ -7402,6 +7404,7 @@ dependencies = [ name = "solana-runtime-transaction" version = "2.1.0" dependencies = [ + "agave-transaction-view", "bincode", "criterion", "log", @@ -7489,7 +7492,7 @@ dependencies = [ "bs58", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -8336,7 +8339,7 @@ checksum = "d9e8418ea6269dcfb01c712f0444d2c75542c04448b480e87de59d2865edc750" dependencies = [ "quote", "spl-discriminator-syn", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -8348,7 +8351,7 @@ dependencies = [ "proc-macro2", "quote", "sha2 0.10.8", - "syn 2.0.74", + "syn 2.0.75", "thiserror", ] @@ -8407,7 +8410,7 @@ dependencies = [ "proc-macro2", "quote", "sha2 0.10.8", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -8595,9 +8598,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.74" +version = "2.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7" +checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9" dependencies = [ "proc-macro2", "quote", @@ -8613,7 +8616,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -8799,7 +8802,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -8811,7 +8814,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", "test-case-core", ] @@ -8847,7 +8850,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -8984,7 +8987,7 @@ source = "git+https://github.com/anza-xyz/solana-tokio.git?rev=7cf47705faacf7bf0 dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -9228,7 +9231,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -9538,7 +9541,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", "wasm-bindgen-shared", ] @@ -9572,7 +9575,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -9931,7 +9934,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] @@ -9951,7 +9954,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.74", + "syn 2.0.75", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 939b9aff789987..2a772d3ee85d3d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -182,7 +182,7 @@ ark-ff = "0.4.0" ark-serialize = "0.4.0" array-bytes = "=1.4.1" arrayref = "0.3.8" -arrayvec = "0.7.4" +arrayvec = "0.7.6" assert_cmd = "2.0" assert_matches = "1.5.0" async-channel = "1.9.0" @@ -199,8 +199,8 @@ bs58 = "0.5.1" bv = "0.11.1" byte-unit = "4.0.19" bytecount = "0.6.8" -bytemuck = "1.16.3" -bytemuck_derive = "1.7.0" +bytemuck = "1.17.0" +bytemuck_derive = "1.7.1" byteorder = "1.5.0" bytes = "1.7" bzip2 = "0.4.4" @@ -273,7 +273,7 @@ jsonrpc-ipc-server = "18.0.0" jsonrpc-pubsub = "18.0.0" 
lazy-lru = "0.1.3" lazy_static = "1.5.0" -libc = "0.2.155" +libc = "0.2.158" libloading = "0.7.4" libsecp256k1 = { version = "0.6.0", default-features = false, features = [ "std", @@ -329,10 +329,10 @@ rustls = { version = "0.21.12", default-features = false, features = ["quic"] } scopeguard = "1.2.0" semver = "1.0.23" seqlock = "0.2.0" -serde = "1.0.207" # must match the serde_derive version, see https://github.com/serde-rs/serde/issues/2584#issuecomment-1685252251 +serde = "1.0.208" # must match the serde_derive version, see https://github.com/serde-rs/serde/issues/2584#issuecomment-1685252251 serde_bytes = "0.11.15" -serde_derive = "1.0.207" # must match the serde version, see https://github.com/serde-rs/serde/issues/2584#issuecomment-1685252251 -serde_json = "1.0.124" +serde_derive = "1.0.208" # must match the serde version, see https://github.com/serde-rs/serde/issues/2584#issuecomment-1685252251 +serde_json = "1.0.125" serde_with = { version = "2.3.3", default-features = false } serde_yaml = "0.9.34" serial_test = "2.0.0" diff --git a/accounts-db/accounts-hash-cache-tool/Cargo.toml b/accounts-db/accounts-hash-cache-tool/Cargo.toml index 908875a4662ceb..241c849d2668e5 100644 --- a/accounts-db/accounts-hash-cache-tool/Cargo.toml +++ b/accounts-db/accounts-hash-cache-tool/Cargo.toml @@ -16,6 +16,7 @@ clap = { workspace = true } memmap2 = { workspace = true } rayon = { workspace = true } solana-accounts-db = { workspace = true } +solana-clap-utils = { workspace = true } solana-program = { workspace = true } solana-version = { workspace = true } diff --git a/accounts-db/accounts-hash-cache-tool/src/main.rs b/accounts-db/accounts-hash-cache-tool/src/main.rs index 076568b4759498..b692ca9482fe25 100644 --- a/accounts-db/accounts-hash-cache-tool/src/main.rs +++ b/accounts-db/accounts-hash-cache-tool/src/main.rs @@ -12,6 +12,7 @@ use { pubkey_bins::PubkeyBinCalculator24, CacheHashDataFileEntry, CacheHashDataFileHeader, ParsedCacheHashDataFilename, }, + solana_clap_utils::input_parsers::values_of, solana_program::pubkey::Pubkey, std::{ cmp::{self, Ordering}, @@ -20,6 +21,7 @@ use { iter, mem::size_of, num::Saturating, + ops::Range, path::{Path, PathBuf}, str, sync::RwLock, @@ -146,17 +148,24 @@ fn main() { ), ) .arg( - Arg::with_name("bin_of_interest") - .long("bin-of-interest") + Arg::with_name("bins_of_interest") + .long("bins-of-interest") .takes_value(true) - .value_name("INDEX") - .help("Specifies a single bin to diff") + .value_name("BINS") + .min_values(1) + .max_values(2) + .value_delimiter("-") + .require_delimiter(true) + .multiple(false) + .help("Specifies bins to diff") .long_help( - "Specifies a single bin to diff. \ + "Specifies bins to diff. \ When diffing large state that does not fit in memory, \ - it may be neccessary to diff a subset at a time. \ - Use this arg to limit the state to a single bin. \ - The INDEX must be less than --bins." + it may be necessary to diff a subset at a time. \ + Use this arg to limit the state to bins of interest. \ + This arg takes either a single bin or a bin range. \ + A bin range is specified as \"start-end\", where \ + \"start\" is inclusive, and \"end\" is exclusive." 
), ), ), @@ -225,22 +234,20 @@ fn cmd_diff_state( let path1 = value_t_or_exit!(subcommand_matches, "path1", String); let path2 = value_t_or_exit!(subcommand_matches, "path2", String); let num_bins = value_t_or_exit!(subcommand_matches, "bins", usize); - let bin_of_interest = - if let Some(bin_of_interest) = subcommand_matches.value_of("bin_of_interest") { - let bin_of_interest = bin_of_interest - .parse() - .map_err(|err| format!("argument 'bin-of-interest' is not a valid value: {err}"))?; - if bin_of_interest >= num_bins { - return Err(format!( - "argument 'bin-of-interest' must be less than 'bins', \ - bins: {num_bins}, bin-of-interest: {bin_of_interest}", - )); + + let bins_of_interest = + if let Some(bins) = values_of::<usize>(subcommand_matches, "bins_of_interest") { + match bins.len() { + 1 => bins[0]..bins[0].saturating_add(1), + 2 => bins[0]..bins[1], + _ => { + unreachable!("invalid number of values given to bins_of_interest.") + } } - Some(bin_of_interest) } else { - None + 0..usize::MAX }; - do_diff_state(path1, path2, num_bins, bin_of_interest) + do_diff_state(path1, path2, num_bins, bins_of_interest) } fn do_inspect(file: impl AsRef<Path>, force: bool) -> Result<(), String> { @@ -510,7 +517,7 @@ fn do_diff_state( dir1: impl AsRef<Path>, dir2: impl AsRef<Path>, num_bins: usize, - bin_of_interest: Option<usize>, + bins_of_interest: Range<usize>, ) -> Result<(), String> { let extract = |dir: &Path| -> Result<_, String> { let files = @@ -521,7 +528,7 @@ fn do_diff_state( } = extract_binned_latest_entries_in( files.iter().map(|file| &file.path), num_bins, - bin_of_interest, + &bins_of_interest, ) .map_err(|err| format!("failed to extract entries: {err}"))?; let num_accounts: usize = latest_entries.iter().map(|bin| bin.len()).sum(); @@ -699,7 +706,7 @@ fn extract_latest_entries_in(file: impl AsRef<Path>) -> Result fn extract_binned_latest_entries_in( files: impl IntoIterator<Item = impl AsRef<Path>>, num_bins: usize, - bin_of_interest: Option<usize>, + bins_of_interest: &Range<usize>, ) -> Result { - if let Some(bin_of_interest) = bin_of_interest { - assert!(bin_of_interest < num_bins); - } - let binner = PubkeyBinCalculator24::new(num_bins); let mut entries: Box<_> = iter::repeat_with(HashMap::default).take(num_bins).collect(); let mut capitalization = Saturating(0); @@ -744,11 +746,8 @@ fn extract_binned_latest_entries_in( let num_entries = scan_mmap(&mmap, |entry| { let bin = binner.bin_from_pubkey(&entry.pubkey); - if let Some(bin_of_interest) = bin_of_interest { - // Is this the bin of interest? If not, skip it.
- if bin != bin_of_interest { - return; - } + if !bins_of_interest.contains(&bin) { + return; } capitalization += entry.lamports; diff --git a/accounts-db/src/accounts.rs b/accounts-db/src/accounts.rs index 9033ceea6e6da5..eb416c29e2f4e9 100644 --- a/accounts-db/src/accounts.rs +++ b/accounts-db/src/accounts.rs @@ -15,13 +15,15 @@ use { account::{AccountSharedData, ReadableAccount}, address_lookup_table::{self, error::AddressLookupError, state::AddressLookupTable}, clock::{BankId, Slot}, - message::v0::{LoadedAddresses, MessageAddressTableLookup}, + message::v0::LoadedAddresses, pubkey::Pubkey, slot_hashes::SlotHashes, transaction::{Result, SanitizedTransaction}, transaction_context::TransactionAccount, }, - solana_svm_transaction::svm_message::SVMMessage, + solana_svm_transaction::{ + message_address_table_lookup::SVMMessageAddressTableLookup, svm_message::SVMMessage, + }, std::{ cmp::Reverse, collections::{BinaryHeap, HashSet}, @@ -82,12 +84,12 @@ impl Accounts { pub fn load_lookup_table_addresses( &self, ancestors: &Ancestors, - address_table_lookup: &MessageAddressTableLookup, + address_table_lookup: SVMMessageAddressTableLookup, slot_hashes: &SlotHashes, ) -> std::result::Result { let table_account = self .accounts_db - .load_with_fixed_root(ancestors, &address_table_lookup.account_key) + .load_with_fixed_root(ancestors, address_table_lookup.account_key) .map(|(account, _rent)| account) .ok_or(AddressLookupError::LookupTableAccountNotFound)?; @@ -99,12 +101,12 @@ impl Accounts { Ok(LoadedAddresses { writable: lookup_table.lookup( current_slot, - &address_table_lookup.writable_indexes, + address_table_lookup.writable_indexes, slot_hashes, )?, readonly: lookup_table.lookup( current_slot, - &address_table_lookup.readonly_indexes, + address_table_lookup.readonly_indexes, slot_hashes, )?, }) @@ -611,7 +613,7 @@ mod tests { address_lookup_table::state::LookupTableMeta, hash::Hash, instruction::CompiledInstruction, - message::{Message, MessageHeader}, + message::{v0::MessageAddressTableLookup, Message, MessageHeader}, native_loader, signature::{signers::Signers, Keypair, Signer}, transaction::{Transaction, TransactionError, MAX_TX_ACCOUNT_LOCKS}, @@ -708,7 +710,7 @@ mod tests { assert_eq!( accounts.load_lookup_table_addresses( &ancestors, - &address_table_lookup, + SVMMessageAddressTableLookup::from(&address_table_lookup), &SlotHashes::default(), ), Err(AddressLookupError::LookupTableAccountNotFound), @@ -735,7 +737,7 @@ mod tests { assert_eq!( accounts.load_lookup_table_addresses( &ancestors, - &address_table_lookup, + SVMMessageAddressTableLookup::from(&address_table_lookup), &SlotHashes::default(), ), Err(AddressLookupError::InvalidAccountOwner), @@ -762,7 +764,7 @@ mod tests { assert_eq!( accounts.load_lookup_table_addresses( &ancestors, - &address_table_lookup, + SVMMessageAddressTableLookup::from(&address_table_lookup), &SlotHashes::default(), ), Err(AddressLookupError::InvalidAccountData), @@ -801,7 +803,7 @@ mod tests { assert_eq!( accounts.load_lookup_table_addresses( &ancestors, - &address_table_lookup, + SVMMessageAddressTableLookup::from(&address_table_lookup), &SlotHashes::default(), ), Ok(LoadedAddresses { diff --git a/builtins-default-costs/src/lib.rs b/builtins-default-costs/src/lib.rs index 48a210b2197338..43c5c3043fcfc0 100644 --- a/builtins-default-costs/src/lib.rs +++ b/builtins-default-costs/src/lib.rs @@ -40,3 +40,17 @@ lazy_static! { .cloned() .collect(); } + +lazy_static! 
{ + /// A table of 256 booleans indicates whether the first `u8` of a Pubkey exists in + /// BUILTIN_INSTRUCTION_COSTS. If the value is true, the Pubkey might be a builtin key; + /// if false, it cannot be a builtin key. This table allows for quick filtering of + /// builtin program IDs without the need for hashing. + pub static ref MAYBE_BUILTIN_KEY: [bool; 256] = { + let mut temp_table: [bool; 256] = [false; 256]; + BUILTIN_INSTRUCTION_COSTS + .keys() + .for_each(|key| temp_table[key.as_ref()[0] as usize] = true); + temp_table + }; +} diff --git a/ci/localnet-sanity.sh b/ci/localnet-sanity.sh index b01eca31d50d81..3a301e4b31e89d 100755 --- a/ci/localnet-sanity.sh +++ b/ci/localnet-sanity.sh @@ -76,18 +76,18 @@ nodes=( "multinode-demo/bootstrap-validator.sh \ --no-restart \ --init-complete-file init-complete-node0.log \ - --dynamic-port-range 8000-8050" + --dynamic-port-range 8000-8200" "multinode-demo/validator.sh \ --no-restart \ - --dynamic-port-range 8050-8100 + --dynamic-port-range 8200-8400 --init-complete-file init-complete-node1.log \ --rpc-port 18899" ) if [[ extraNodes -gt 0 ]]; then for i in $(seq 1 $extraNodes); do - portStart=$((8100 + i * 50)) - portEnd=$((portStart + 49)) + portStart=$((8400 + i * 200)) + portEnd=$((portStart + 200)) nodes+=( "multinode-demo/validator.sh \ --no-restart \ diff --git a/core/src/validator.rs b/core/src/validator.rs index 373eebf4ae1be2..016514dd817166 100644 --- a/core/src/validator.rs +++ b/core/src/validator.rs @@ -138,7 +138,7 @@ use { time::{Duration, Instant}, }, strum::VariantNames, - strum_macros::{Display, EnumString, EnumVariantNames, IntoStaticStr}, + strum_macros::{Display, EnumCount, EnumIter, EnumString, EnumVariantNames, IntoStaticStr}, thiserror::Error, tokio::runtime::Runtime as TokioRuntime, }; @@ -151,11 +151,13 @@ const WAIT_FOR_SUPERMAJORITY_THRESHOLD_PERCENT: u64 = 80; const WAIT_FOR_WEN_RESTART_SUPERMAJORITY_THRESHOLD_PERCENT: u64 = WAIT_FOR_SUPERMAJORITY_THRESHOLD_PERCENT; -#[derive(Clone, EnumString, EnumVariantNames, Default, IntoStaticStr, Display)] +#[derive( + Clone, EnumCount, EnumIter, EnumString, EnumVariantNames, Default, IntoStaticStr, Display, +)] #[strum(serialize_all = "kebab-case")] pub enum BlockVerificationMethod { - #[default] BlockstoreProcessor, + #[default] UnifiedScheduler, } diff --git a/core/src/window_service.rs b/core/src/window_service.rs index 511da236958af3..ff902e414ee017 100644 --- a/core/src/window_service.rs +++ b/core/src/window_service.rs @@ -166,21 +166,8 @@ fn run_check_duplicate( ); let (shred1, shred2) = match shred { PossibleDuplicateShred::LastIndexConflict(shred, conflict) - | PossibleDuplicateShred::ErasureConflict(shred, conflict) => (shred, conflict), - PossibleDuplicateShred::MerkleRootConflict(shred, conflict) => { - // Although this proof can be immediately stored on detection, we wait until - // here in order to check the feature flag, as storage in blockstore can - // preclude the detection of other duplicate proofs in this slot - if blockstore.has_duplicate_shreds_in_slot(shred_slot) { - return Ok(()); - } - blockstore.store_duplicate_slot( - shred_slot, - conflict.clone(), - shred.clone().into_payload(), - )?; - (shred, conflict) - } + | PossibleDuplicateShred::ErasureConflict(shred, conflict) + | PossibleDuplicateShred::MerkleRootConflict(shred, conflict) => (shred, conflict), PossibleDuplicateShred::ChainedMerkleRootConflict(shred, conflict) => { if chained_merkle_conflict_duplicate_proofs { // Although this proof can be immediately stored on detection, we wait until 
diff --git a/gossip/src/cluster_info.rs b/gossip/src/cluster_info.rs index 07f7486d44260d..850346c8e0e9d0 100644 --- a/gossip/src/cluster_info.rs +++ b/gossip/src/cluster_info.rs @@ -419,7 +419,11 @@ impl Sanitize for Protocol { // Retains only CRDS values associated with nodes with enough stake. // (some crds types are exempted) -fn retain_staked(values: &mut Vec<CrdsValue>, stakes: &HashMap<Pubkey, u64>) { +fn retain_staked( + values: &mut Vec<CrdsValue>, + stakes: &HashMap<Pubkey, u64>, + drop_unstaked_node_instance: bool, +) { values.retain(|value| { match value.data { CrdsData::ContactInfo(_) => true, @@ -434,6 +438,7 @@ fn retain_staked(values: &mut Vec<CrdsValue>, stakes: &HashMap<Pubkey, u64>) { // the various dashboards. CrdsData::Version(_) => true, CrdsData::AccountsHashes(_) => true, + CrdsData::NodeInstance(_) if !drop_unstaked_node_instance => true, CrdsData::LowestSlot(_, _) | CrdsData::LegacyVersion(_) | CrdsData::DuplicateShred(_, _) @@ -1646,7 +1651,7 @@ impl ClusterInfo { .add_relaxed(num_nodes as u64); if self.require_stake_for_gossip(stakes) { push_messages.retain(|_, data| { - retain_staked(data, stakes); + retain_staked(data, stakes, /* drop_unstaked_node_instance */ false); !data.is_empty() }) } @@ -2138,7 +2143,7 @@ impl ClusterInfo { }; if self.require_stake_for_gossip(stakes) { for resp in &mut pull_responses { - retain_staked(resp, stakes); + retain_staked(resp, stakes, /* drop_unstaked_node_instance */ true); } } let (responses, scores): (Vec<_>, Vec<_>) = addrs @@ -2544,9 +2549,13 @@ impl ClusterInfo { } } if self.require_stake_for_gossip(stakes) { - retain_staked(&mut pull_responses, stakes); + retain_staked( + &mut pull_responses, + stakes, + /* drop_unstaked_node_instance */ false, + ); for (_, data) in &mut push_messages { - retain_staked(data, stakes); + retain_staked(data, stakes, /* drop_unstaked_node_instance */ false); } push_messages.retain(|(_, data)| !data.is_empty()); } diff --git a/gossip/src/contact_info.rs b/gossip/src/contact_info.rs index 09105909b7ab9b..b745db31f43692 100644 --- a/gossip/src/contact_info.rs +++ b/gossip/src/contact_info.rs @@ -212,7 +212,7 @@ impl ContactInfo { &self.version } - pub(crate) fn hot_swap_pubkey(&mut self, pubkey: Pubkey) { + pub fn hot_swap_pubkey(&mut self, pubkey: Pubkey) { self.pubkey = pubkey; // Need to update ContactInfo.outset so that this node's contact-info // will override older node with the same pubkey.
diff --git a/ledger-tool/src/main.rs b/ledger-tool/src/main.rs index 06f17a55e03a2a..d9a3a60d2f4600 100644 --- a/ledger-tool/src/main.rs +++ b/ledger-tool/src/main.rs @@ -571,13 +571,13 @@ fn setup_slot_recording( exit(1); }); - let mut include_bank = false; + let mut include_bank_hash_components = false; let mut include_tx = false; if let Some(args) = arg_matches.values_of("record_slots_config") { for arg in args { match arg { "tx" => include_tx = true, - "accounts" => include_bank = true, + "accounts" => include_bank_hash_components = true, _ => unreachable!(), } } @@ -603,16 +603,11 @@ fn setup_slot_recording( let slot_callback = Arc::new({ let slots = Arc::clone(&slot_details); move |bank: &Bank| { - let mut details = if include_bank { - bank_hash_details::SlotDetails::try_from(bank).unwrap() - } else { - bank_hash_details::SlotDetails { - slot: bank.slot(), - bank_hash: bank.hash().to_string(), - ..Default::default() - } - }; - + let mut details = bank_hash_details::SlotDetails::new_from_bank( + bank, + include_bank_hash_components, + ) + .unwrap(); let mut slots = slots.lock().unwrap(); if let Some(recorded_slot) = slots.iter_mut().find(|f| f.slot == details.slot) { diff --git a/ledger/src/blockstore.rs b/ledger/src/blockstore.rs index 55377cb31557eb..2101896d9a0558 100644 --- a/ledger/src/blockstore.rs +++ b/ledger/src/blockstore.rs @@ -1811,6 +1811,17 @@ impl Blockstore { ); return true; }; + if let Err(e) = self.store_duplicate_slot( + slot, + conflicting_shred.clone(), + shred.clone().into_payload(), + ) { + warn!( + "Unable to store conflicting merkle root duplicate proof for {slot} \ + {:?} {e}", + shred.erasure_set(), + ); + } duplicate_shreds.push(PossibleDuplicateShred::MerkleRootConflict( shred.clone(), conflicting_shred, @@ -2868,12 +2879,11 @@ impl Blockstore { } } - pub fn write_transaction_status( + pub fn write_transaction_status<'a>( &self, slot: Slot, signature: Signature, - writable_keys: Vec<&Pubkey>, - readonly_keys: Vec<&Pubkey>, + keys_with_writable: impl Iterator<Item = (&'a Pubkey, bool)>, status: TransactionStatusMeta, transaction_index: usize, ) -> Result<()> { @@ -2882,18 +2892,14 @@ impl Blockstore { .map_err(|_| BlockstoreError::TransactionIndexOverflow)?; self.transaction_status_cf .put_protobuf((signature, slot), &status)?; - for address in writable_keys { - self.address_signatures_cf.put( - (*address, slot, transaction_index, signature), - &AddressSignatureMeta { writeable: true }, - )?; - } - for address in readonly_keys { + + for (address, writeable) in keys_with_writable { self.address_signatures_cf.put( (*address, slot, transaction_index, signature), - &AddressSignatureMeta { writeable: false }, + &AddressSignatureMeta { writeable }, )?; } + Ok(()) } @@ -8673,8 +8679,11 @@ pub mod tests { .write_transaction_status( slot, signature, - vec![&Pubkey::new_unique()], - vec![&Pubkey::new_unique()], + vec![ + (&Pubkey::new_unique(), true), + (&Pubkey::new_unique(), false), + ] + .into_iter(), TransactionStatusMeta { fee: slot * 1_000, ..TransactionStatusMeta::default() }, @@ -9061,8 +9070,7 @@ pub mod tests { .write_transaction_status( lowest_cleanup_slot, signature1, - vec![&address0], - vec![], + vec![(&address0, true)].into_iter(), TransactionStatusMeta::default(), 0, ) @@ -9071,8 +9079,7 @@ pub mod tests { .write_transaction_status( lowest_available_slot, signature2, - vec![&address1], - vec![], + vec![(&address1, true)].into_iter(), TransactionStatusMeta::default(), 0, ) @@ -9440,8 +9447,7 @@ pub mod tests { .write_transaction_status( slot1, signature, - vec![&address0], -
vec![&address1], + vec![(&address0, true), (&address1, false)].into_iter(), TransactionStatusMeta::default(), x as usize, ) @@ -9454,8 +9460,7 @@ pub mod tests { .write_transaction_status( slot2, signature, - vec![&address0], - vec![&address1], + vec![(&address0, true), (&address1, false)].into_iter(), TransactionStatusMeta::default(), x as usize, ) @@ -9467,8 +9472,7 @@ pub mod tests { .write_transaction_status( slot2, signature, - vec![&address0], - vec![&address1], + vec![(&address0, true), (&address1, false)].into_iter(), TransactionStatusMeta::default(), x as usize, ) @@ -9481,8 +9485,7 @@ pub mod tests { .write_transaction_status( slot3, signature, - vec![&address0], - vec![&address1], + vec![(&address0, true), (&address1, false)].into_iter(), TransactionStatusMeta::default(), x as usize, ) @@ -9565,8 +9568,11 @@ pub mod tests { .write_transaction_status( slot, transaction.signatures[0], - transaction.message.static_account_keys().iter().collect(), - vec![], + transaction + .message + .static_account_keys() + .iter() + .map(|key| (key, true)), TransactionStatusMeta::default(), counter, ) @@ -9593,8 +9599,11 @@ pub mod tests { .write_transaction_status( slot, transaction.signatures[0], - transaction.message.static_account_keys().iter().collect(), - vec![], + transaction + .message + .static_account_keys() + .iter() + .map(|key| (key, true)), TransactionStatusMeta::default(), counter, ) diff --git a/ledger/src/blockstore/blockstore_purge.rs b/ledger/src/blockstore/blockstore_purge.rs index d442732303fa2a..b2d79c2bf59672 100644 --- a/ledger/src/blockstore/blockstore_purge.rs +++ b/ledger/src/blockstore/blockstore_purge.rs @@ -582,8 +582,11 @@ pub mod tests { .write_transaction_status( x, Signature::from(random_bytes), - vec![&Pubkey::try_from(&random_bytes[..32]).unwrap()], - vec![&Pubkey::try_from(&random_bytes[32..]).unwrap()], + vec![ + (&Pubkey::try_from(&random_bytes[..32]).unwrap(), true), + (&Pubkey::try_from(&random_bytes[32..]).unwrap(), false), + ] + .into_iter(), TransactionStatusMeta::default(), 0, ) @@ -640,8 +643,11 @@ pub mod tests { .write_transaction_status( x, signature, - vec![&Pubkey::try_from(&random_bytes[..32]).unwrap()], - vec![&Pubkey::try_from(&random_bytes[32..]).unwrap()], + vec![ + (&Pubkey::try_from(&random_bytes[..32]).unwrap(), true), + (&Pubkey::try_from(&random_bytes[32..]).unwrap(), false), + ] + .into_iter(), TransactionStatusMeta::default(), 0, ) @@ -715,8 +721,11 @@ pub mod tests { .write_transaction_status( slot, transaction.signatures[0], - transaction.message.static_account_keys().iter().collect(), - vec![], + transaction + .message + .static_account_keys() + .iter() + .map(|key| (key, true)), TransactionStatusMeta::default(), 0, ) diff --git a/local-cluster/Cargo.toml b/local-cluster/Cargo.toml index 3e4cbc0e366531..cd8e2bf6523152 100644 --- a/local-cluster/Cargo.toml +++ b/local-cluster/Cargo.toml @@ -37,6 +37,7 @@ solana-turbine = { workspace = true } solana-vote = { workspace = true } solana-vote-program = { workspace = true } static_assertions = { workspace = true } +strum = { workspace = true, features = ["derive"] } tempfile = { workspace = true } trees = { workspace = true } diff --git a/local-cluster/tests/local_cluster.rs b/local-cluster/tests/local_cluster.rs index 1e62835f91b1a2..a4c767e22ede52 100644 --- a/local-cluster/tests/local_cluster.rs +++ b/local-cluster/tests/local_cluster.rs @@ -5,7 +5,7 @@ use { gag::BufferRedirect, itertools::Itertools, log::*, - rand::seq::IteratorRandom, + rand::seq::SliceRandom, serial_test::serial, 
solana_accounts_db::{ hardened_unpack::open_genesis_config, utils::create_accounts_run_and_snapshot_dirs, @@ -95,6 +95,7 @@ use { thread::{sleep, Builder, JoinHandle}, time::{Duration, Instant}, }, + strum::{EnumCount, IntoEnumIterator}, }; #[test] @@ -5710,20 +5711,19 @@ fn test_randomly_mixed_block_verification_methods_between_bootstrap_and_not() { info", ); - let num_nodes = 2; + let num_nodes = BlockVerificationMethod::COUNT; let mut config = ClusterConfig::new_with_equal_stakes( num_nodes, DEFAULT_CLUSTER_LAMPORTS, DEFAULT_NODE_STAKE, ); - // Randomly switch to use unified scheduler - config - .validator_configs - .iter_mut() - .choose(&mut rand::thread_rng()) - .unwrap() - .block_verification_method = BlockVerificationMethod::UnifiedScheduler; + // Overwrite block_verification_method with shuffled variants + let mut methods = BlockVerificationMethod::iter().collect::<Vec<_>>(); + methods.shuffle(&mut rand::thread_rng()); + for (validator_config, method) in config.validator_configs.iter_mut().zip_eq(methods) { + validator_config.block_verification_method = method; + } let local = LocalCluster::new(&mut config, SocketAddrSpace::Unspecified); cluster_tests::spend_and_verify_all_nodes( diff --git a/programs/loader-v4/src/lib.rs b/programs/loader-v4/src/lib.rs index f44031f6f15021..91b15e33b27755 100644 --- a/programs/loader-v4/src/lib.rs +++ b/programs/loader-v4/src/lib.rs @@ -5,7 +5,7 @@ use { solana_program_runtime::{ invoke_context::InvokeContext, loaded_programs::{ - LoadProgramMetrics, ProgramCacheEntry, ProgramCacheEntryType, + LoadProgramMetrics, ProgramCacheEntry, ProgramCacheEntryOwner, ProgramCacheEntryType, DELAY_VISIBILITY_SLOT_OFFSET, }, stable_log, @@ -199,10 +199,6 @@ fn check_program_account( ic_logger_msg!(log_collector, "Program not owned by loader"); return Err(InstructionError::InvalidAccountOwner); } - if program.get_data().is_empty() { - ic_logger_msg!(log_collector, "Program is uninitialized"); - return Err(InstructionError::InvalidAccountData); - } let state = get_state(program.get_data())?; if !program.is_writable() { ic_logger_msg!(log_collector, "Program is not writeable"); @@ -488,12 +484,22 @@ pub fn process_instruction_retract( ); return Err(InstructionError::InvalidArgument); } - if matches!(state.status, LoaderV4Status::Retracted) { + if !matches!(state.status, LoaderV4Status::Deployed) { ic_logger_msg!(log_collector, "Program is not deployed"); return Err(InstructionError::InvalidArgument); } let state = get_state_mut(program.get_data_mut()?)?; state.status = LoaderV4Status::Retracted; + invoke_context + .program_cache_for_tx_batch + .store_modified_entry( + *program.get_key(), + Arc::new(ProgramCacheEntry::new_tombstone( + current_slot, + ProgramCacheEntryOwner::LoaderV4, + ProgramCacheEntryType::Closed, + )), + ); Ok(()) } @@ -518,12 +524,16 @@ pub fn process_instruction_transfer_authority( &program, authority_address, )?; - if new_authority_address.is_some() && !instruction_context.is_instruction_account_signer(2)? { - ic_logger_msg!(log_collector, "New authority did not sign"); - return Err(InstructionError::MissingRequiredSignature); - } let state = get_state_mut(program.get_data_mut()?)?; if let Some(new_authority_address) = new_authority_address { + if !instruction_context.is_instruction_account_signer(2)?
{ + ic_logger_msg!(log_collector, "New authority did not sign"); + return Err(InstructionError::MissingRequiredSignature); + } + if state.authority_address == new_authority_address { + ic_logger_msg!(log_collector, "No change"); + return Err(InstructionError::InvalidArgument); + } state.authority_address = new_authority_address; } else if matches!(state.status, LoaderV4Status::Deployed) { state.status = LoaderV4Status::Finalized; @@ -575,18 +585,10 @@ pub fn process_instruction_inner( .map_err(|err| Box::new(err) as Box<dyn std::error::Error>) } else { let program = instruction_context.try_borrow_last_program_account(transaction_context)?; - if !loader_v4::check_id(program.get_owner()) { - ic_logger_msg!(log_collector, "Program not owned by loader"); - return Err(Box::new(InstructionError::InvalidAccountOwner)); - } - if program.get_data().is_empty() { - ic_logger_msg!(log_collector, "Program is uninitialized"); - return Err(Box::new(InstructionError::InvalidAccountData)); - } let state = get_state(program.get_data())?; if matches!(state.status, LoaderV4Status::Retracted) { - ic_logger_msg!(log_collector, "Program is not deployed"); - return Err(Box::new(InstructionError::InvalidArgument)); + ic_logger_msg!(log_collector, "Program is retracted"); + return Err(Box::new(InstructionError::UnsupportedProgramId)); } let mut get_or_create_executor_time = Measure::start("get_or_create_executor_time"); let loaded_program = invoke_context .program_cache_for_tx_batch .find(program.get_key()) .ok_or_else(|| { ic_logger_msg!(log_collector, "Program is not cached"); - InstructionError::InvalidAccountData + InstructionError::UnsupportedProgramId })?; get_or_create_executor_time.stop(); saturating_add_assign!( @@ -610,10 +612,12 @@ pub fn process_instruction_inner( | ProgramCacheEntryType::Closed | ProgramCacheEntryType::DelayVisibility => { ic_logger_msg!(log_collector, "Program is not deployed"); - Err(Box::new(InstructionError::InvalidAccountData) as Box<dyn std::error::Error>) + Err(Box::new(InstructionError::UnsupportedProgramId) as Box<dyn std::error::Error>) } ProgramCacheEntryType::Loaded(executable) => execute(invoke_context, executable), - _ => Err(Box::new(InstructionError::IncorrectProgramId) as Box<dyn std::error::Error>), + _ => { + Err(Box::new(InstructionError::UnsupportedProgramId) as Box<dyn std::error::Error>) + } } } .map(|_| 0) @@ -1157,7 +1161,7 @@ mod tests { &bincode::serialize(&LoaderV4Instruction::Truncate { new_size: 0 }).unwrap(), transaction_accounts.clone(), &[(3, false, true), (1, true, false), (2, true, true)], - Err(InstructionError::InvalidAccountData), + Err(InstructionError::AccountDataTooSmall), ); // Error: Program is not retracted @@ -1331,7 +1335,7 @@ mod tests { &bincode::serialize(&LoaderV4Instruction::Deploy).unwrap(), transaction_accounts.clone(), &[(3, false, true), (1, true, false)], - Err(InstructionError::InvalidAccountData), + Err(InstructionError::AccountDataTooSmall), ); // Error: Program fails verification @@ -1410,7 +1414,7 @@ mod tests { &bincode::serialize(&LoaderV4Instruction::Retract).unwrap(), transaction_accounts.clone(), &[(2, false, true), (1, true, false)], - Err(InstructionError::InvalidAccountData), + Err(InstructionError::AccountDataTooSmall), ); // Error: Program is not deployed @@ -1520,18 +1524,27 @@ mod tests { &bincode::serialize(&LoaderV4Instruction::TransferAuthority).unwrap(), transaction_accounts.clone(), &[(2, false, true), (3, true, false), (4, true, false)], - Err(InstructionError::InvalidAccountData), + Err(InstructionError::AccountDataTooSmall), ); // Error: New authority did not sign process_instruction( vec![],
&bincode::serialize(&LoaderV4Instruction::TransferAuthority).unwrap(), - transaction_accounts, + transaction_accounts.clone(), &[(0, false, true), (3, true, false), (4, false, false)], Err(InstructionError::MissingRequiredSignature), ); + // Error: Authority did not change + process_instruction( + vec![], + &bincode::serialize(&LoaderV4Instruction::TransferAuthority).unwrap(), + transaction_accounts, + &[(0, false, true), (3, true, false), (3, true, false)], + Err(InstructionError::InvalidArgument), + ); + test_loader_instruction_general_errors(LoaderV4Instruction::TransferAuthority); } @@ -1598,7 +1611,7 @@ mod tests { &[0, 1, 2, 3], transaction_accounts.clone(), &[(1, false, true)], - Err(InstructionError::InvalidAccountData), + Err(InstructionError::AccountDataTooSmall), ); // Error: Program is not deployed @@ -1607,7 +1620,7 @@ mod tests { &[0, 1, 2, 3], transaction_accounts.clone(), &[(1, false, true)], - Err(InstructionError::InvalidArgument), + Err(InstructionError::UnsupportedProgramId), ); // Error: Program fails verification @@ -1616,7 +1629,7 @@ mod tests { &[0, 1, 2, 3], transaction_accounts, &[(1, false, true)], - Err(InstructionError::InvalidAccountData), + Err(InstructionError::UnsupportedProgramId), ); } } diff --git a/programs/sbf/Cargo.lock b/programs/sbf/Cargo.lock index 512aff5dc21cb6..f78add5c5688cb 100644 --- a/programs/sbf/Cargo.lock +++ b/programs/sbf/Cargo.lock @@ -73,6 +73,14 @@ dependencies = [ "thiserror", ] +[[package]] +name = "agave-transaction-view" +version = "2.1.0" +dependencies = [ + "solana-sdk", + "solana-svm-transaction", +] + [[package]] name = "agave-validator" version = "2.1.0" @@ -382,9 +390,9 @@ checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" [[package]] name = "arrayvec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "ascii" @@ -847,18 +855,18 @@ dependencies = [ [[package]] name = "bytemuck" -version = "1.16.3" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "102087e286b4677862ea56cf8fc58bb2cdfa8725c40ffb80fe3a008eb7f2fc83" +checksum = "6fd4c6dcc3b0aea2f5c0b4b82c2b15fe39ddbc76041a310848f4706edf76bb31" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.7.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee891b04274a59bd38b412188e24b849617b2e45a0fd8d057deb63e7403761b" +checksum = "0cc8b54b395f2fcfbb3d90c47b01c7f444d94d05bdeb775811dec868ac3bbc26" dependencies = [ "proc-macro2", "quote", @@ -2541,9 +2549,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.155" +version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" [[package]] name = "libloading" @@ -4226,9 +4234,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.207" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5665e14a49a4ea1b91029ba7d3bca9f299e1f7cfa194388ccc20f14743e784f2" +checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" dependencies = [ 
"serde_derive", ] @@ -4244,9 +4252,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.207" +version = "1.0.208" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aea2634c86b0e8ef2cfdc0c340baede54ec27b1e46febd7f80dffb2aa44a00e" +checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" dependencies = [ "proc-macro2", "quote", @@ -4255,9 +4263,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.124" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d" +checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed" dependencies = [ "itoa", "memchr", @@ -5736,7 +5744,9 @@ dependencies = [ name = "solana-runtime-transaction" version = "2.1.0" dependencies = [ + "agave-transaction-view", "log", + "solana-builtins-default-costs", "solana-compute-budget", "solana-sdk", "solana-svm-transaction", diff --git a/rpc/src/transaction_status_service.rs b/rpc/src/transaction_status_service.rs index 43ce83c0966de6..314f8b9a4f5fda 100644 --- a/rpc/src/transaction_status_service.rs +++ b/rpc/src/transaction_status_service.rs @@ -104,7 +104,6 @@ impl TransactionStatusService { rent_debits, .. } = committed_tx; - let tx_account_locks = transaction.get_account_locks_unchecked(); let fee = fee_details.total_fee(); let inner_instructions = inner_instructions.map(|inner_instructions| { @@ -164,12 +163,18 @@ impl TransactionStatusService { .expect("Expect database write to succeed: TransactionMemos"); } + let message = transaction.message(); + let keys_with_writable = message + .account_keys() + .iter() + .enumerate() + .map(|(index, key)| (key, message.is_writable(index))); + blockstore .write_transaction_status( slot, *transaction.signature(), - tx_account_locks.writable, - tx_account_locks.readonly, + keys_with_writable, transaction_status_meta, transaction_index, ) diff --git a/runtime-transaction/Cargo.toml b/runtime-transaction/Cargo.toml index 32e42cefcf2f29..69d172ec112c7c 100644 --- a/runtime-transaction/Cargo.toml +++ b/runtime-transaction/Cargo.toml @@ -10,7 +10,9 @@ license = { workspace = true } edition = { workspace = true } [dependencies] +agave-transaction-view = { workspace = true } log = { workspace = true } +solana-builtins-default-costs = { workspace = true } solana-compute-budget = { workspace = true } solana-sdk = { workspace = true } solana-svm-transaction = { workspace = true } @@ -24,7 +26,6 @@ name = "solana_runtime_transaction" bincode = { workspace = true } criterion = { workspace = true } rand = { workspace = true } -solana-builtins-default-costs = { workspace = true } solana-program = { workspace = true } [package.metadata.docs.rs] diff --git a/runtime-transaction/src/compute_budget_instruction_details.rs b/runtime-transaction/src/compute_budget_instruction_details.rs index 993c3905f6c101..ab148ef14ae152 100644 --- a/runtime-transaction/src/compute_budget_instruction_details.rs +++ b/runtime-transaction/src/compute_budget_instruction_details.rs @@ -1,8 +1,9 @@ use { + crate::compute_budget_program_id_filter::ComputeBudgetProgramIdFilter, solana_compute_budget::compute_budget_limits::*, solana_sdk::{ borsh1::try_from_slice_unchecked, - compute_budget::{self, ComputeBudgetInstruction}, + compute_budget::ComputeBudgetInstruction, instruction::InstructionError, pubkey::Pubkey, saturating_add_assign, @@ -28,13 +29,18 @@ impl ComputeBudgetInstructionDetails { pub fn try_from<'a>( 
instructions: impl Iterator)>, ) -> Result { + let mut filter = ComputeBudgetProgramIdFilter::new(); + let mut compute_budget_instruction_details = ComputeBudgetInstructionDetails::default(); for (i, (program_id, instruction)) in instructions.enumerate() { - compute_budget_instruction_details.process_instruction( - i as u8, - program_id, - &instruction, - )?; + if filter.is_compute_budget_program(instruction.program_id_index as usize, program_id) { + compute_budget_instruction_details.process_instruction(i as u8, &instruction)?; + } else { + saturating_add_assign!( + compute_budget_instruction_details.num_non_compute_budget_instructions, + 1 + ); + } } Ok(compute_budget_instruction_details) @@ -94,46 +100,37 @@ impl ComputeBudgetInstructionDetails { }) } - fn process_instruction( - &mut self, - index: u8, - program_id: &Pubkey, - instruction: &SVMInstruction, - ) -> Result<()> { - if compute_budget::check_id(program_id) { - let invalid_instruction_data_error = - TransactionError::InstructionError(index, InstructionError::InvalidInstructionData); - let duplicate_instruction_error = TransactionError::DuplicateInstruction(index); - - match try_from_slice_unchecked(instruction.data) { - Ok(ComputeBudgetInstruction::RequestHeapFrame(bytes)) => { - if self.requested_heap_size.is_some() { - return Err(duplicate_instruction_error); - } - self.requested_heap_size = Some((index, bytes)); + fn process_instruction(&mut self, index: u8, instruction: &SVMInstruction) -> Result<()> { + let invalid_instruction_data_error = + TransactionError::InstructionError(index, InstructionError::InvalidInstructionData); + let duplicate_instruction_error = TransactionError::DuplicateInstruction(index); + + match try_from_slice_unchecked(instruction.data) { + Ok(ComputeBudgetInstruction::RequestHeapFrame(bytes)) => { + if self.requested_heap_size.is_some() { + return Err(duplicate_instruction_error); } - Ok(ComputeBudgetInstruction::SetComputeUnitLimit(compute_unit_limit)) => { - if self.requested_compute_unit_limit.is_some() { - return Err(duplicate_instruction_error); - } - self.requested_compute_unit_limit = Some((index, compute_unit_limit)); + self.requested_heap_size = Some((index, bytes)); + } + Ok(ComputeBudgetInstruction::SetComputeUnitLimit(compute_unit_limit)) => { + if self.requested_compute_unit_limit.is_some() { + return Err(duplicate_instruction_error); } - Ok(ComputeBudgetInstruction::SetComputeUnitPrice(micro_lamports)) => { - if self.requested_compute_unit_price.is_some() { - return Err(duplicate_instruction_error); - } - self.requested_compute_unit_price = Some((index, micro_lamports)); + self.requested_compute_unit_limit = Some((index, compute_unit_limit)); + } + Ok(ComputeBudgetInstruction::SetComputeUnitPrice(micro_lamports)) => { + if self.requested_compute_unit_price.is_some() { + return Err(duplicate_instruction_error); } - Ok(ComputeBudgetInstruction::SetLoadedAccountsDataSizeLimit(bytes)) => { - if self.requested_loaded_accounts_data_size_limit.is_some() { - return Err(duplicate_instruction_error); - } - self.requested_loaded_accounts_data_size_limit = Some((index, bytes)); + self.requested_compute_unit_price = Some((index, micro_lamports)); + } + Ok(ComputeBudgetInstruction::SetLoadedAccountsDataSizeLimit(bytes)) => { + if self.requested_loaded_accounts_data_size_limit.is_some() { + return Err(duplicate_instruction_error); } - _ => return Err(invalid_instruction_data_error), + self.requested_loaded_accounts_data_size_limit = Some((index, bytes)); } - } else { - 
saturating_add_assign!(self.num_non_compute_budget_instructions, 1); + _ => return Err(invalid_instruction_data_error), } Ok(()) @@ -148,256 +145,135 @@ impl ComputeBudgetInstructionDetails { mod test { use { super::*, - solana_sdk::instruction::{CompiledInstruction, Instruction}, + solana_sdk::{ + instruction::Instruction, + message::Message, + pubkey::Pubkey, + signature::Keypair, + signer::Signer, + transaction::{SanitizedTransaction, Transaction}, + }, + solana_svm_transaction::svm_message::SVMMessage, }; - fn setup_test_instruction( - index: u8, - instruction: Instruction, - ) -> (Pubkey, CompiledInstruction) { - ( - instruction.program_id, - CompiledInstruction { - program_id_index: index, - data: instruction.data.clone(), - accounts: vec![], - }, - ) + fn build_sanitized_transaction(instructions: &[Instruction]) -> SanitizedTransaction { + let payer_keypair = Keypair::new(); + SanitizedTransaction::from_transaction_for_tests(Transaction::new_unsigned(Message::new( + instructions, + Some(&payer_keypair.pubkey()), + ))) } #[test] - fn test_process_instruction_request_heap() { - let mut index = 0; - let mut expected_details = ComputeBudgetInstructionDetails::default(); - let mut compute_budget_instruction_details = ComputeBudgetInstructionDetails::default(); - - // irrelevant instruction makes no change - index += 1; - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), - ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions = 1; - assert_eq!(compute_budget_instruction_details, expected_details); - - // valid instruction - index += 1; - let (program_id, ix) = setup_test_instruction( - index, + fn test_try_from_request_heap() { + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), ComputeBudgetInstruction::request_heap_frame(40 * 1024), - ); - expected_details.requested_heap_size = Some((index, 40 * 1024)); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - assert_eq!(compute_budget_instruction_details, expected_details); - - // duplicate instruction results error - index += 1; - let expected_err = Err(TransactionError::DuplicateInstruction(index)); - let (program_id, ix) = setup_test_instruction( - index, - ComputeBudgetInstruction::request_heap_frame(50 * 1024), - ); + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ]); + let expected_details = ComputeBudgetInstructionDetails { + requested_heap_size: Some((1, 40 * 1024)), + num_non_compute_budget_instructions: 2, + ..ComputeBudgetInstructionDetails::default() + }; assert_eq!( - compute_budget_instruction_details.process_instruction( - index, - &program_id, - &SVMInstruction::from(&ix) - ), - expected_err + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Ok(expected_details) ); - assert_eq!(compute_budget_instruction_details, expected_details); - // irrelevant instruction makes no change - index += 1; - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ComputeBudgetInstruction::request_heap_frame(40 * 1024), + 
ComputeBudgetInstruction::request_heap_frame(41 * 1024), + ]); + assert_eq!( + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Err(TransactionError::DuplicateInstruction(2)) ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions += 1; - assert_eq!(compute_budget_instruction_details, expected_details); } #[test] - fn test_process_instruction_compute_unit_limit() { - let mut index = 0; - let mut expected_details = ComputeBudgetInstructionDetails::default(); - let mut compute_budget_instruction_details = ComputeBudgetInstructionDetails::default(); - - // irrelevant instruction makes no change - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), - ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions = 1; - assert_eq!(compute_budget_instruction_details, expected_details); - - // valid instruction, - index += 1; - let (program_id, ix) = setup_test_instruction( - index, + fn test_try_from_compute_unit_limit() { + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), ComputeBudgetInstruction::set_compute_unit_limit(u32::MAX), - ); - expected_details.requested_compute_unit_limit = Some((index, u32::MAX)); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - assert_eq!(compute_budget_instruction_details, expected_details); - - // duplicate instruction results error - index += 1; - let expected_err = Err(TransactionError::DuplicateInstruction(index)); - let (program_id, ix) = setup_test_instruction( - index, - ComputeBudgetInstruction::set_compute_unit_limit(MAX_COMPUTE_UNIT_LIMIT), - ); + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ]); + let expected_details = ComputeBudgetInstructionDetails { + requested_compute_unit_limit: Some((1, u32::MAX)), + num_non_compute_budget_instructions: 2, + ..ComputeBudgetInstructionDetails::default() + }; assert_eq!( - compute_budget_instruction_details.process_instruction( - index, - &program_id, - &SVMInstruction::from(&ix) - ), - expected_err + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Ok(expected_details) ); - assert_eq!(compute_budget_instruction_details, expected_details); - // irrelevant instruction makes no change - index += 1; - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ComputeBudgetInstruction::set_compute_unit_limit(0), + ComputeBudgetInstruction::set_compute_unit_limit(u32::MAX), + ]); + assert_eq!( + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Err(TransactionError::DuplicateInstruction(2)) ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions += 1; - assert_eq!(compute_budget_instruction_details, expected_details); } #[test] - fn test_process_instruction_compute_unit_price() { - let mut index = 0; - let mut 
expected_details = ComputeBudgetInstructionDetails::default(); - let mut compute_budget_instruction_details = ComputeBudgetInstructionDetails::default(); - - // irrelevant instruction makes no change - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), - ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions = 1; - assert_eq!(compute_budget_instruction_details, expected_details); - - // valid instruction, - index += 1; - let (program_id, ix) = setup_test_instruction( - index, + fn test_try_from_compute_unit_price() { + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), ComputeBudgetInstruction::set_compute_unit_price(u64::MAX), - ); - expected_details.requested_compute_unit_price = Some((index, u64::MAX)); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - assert_eq!(compute_budget_instruction_details, expected_details); - - // duplicate instruction results error - index += 1; - let expected_err = Err(TransactionError::DuplicateInstruction(index)); - let (program_id, ix) = - setup_test_instruction(index, ComputeBudgetInstruction::set_compute_unit_price(0)); + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ]); + let expected_details = ComputeBudgetInstructionDetails { + requested_compute_unit_price: Some((1, u64::MAX)), + num_non_compute_budget_instructions: 2, + ..ComputeBudgetInstructionDetails::default() + }; assert_eq!( - compute_budget_instruction_details.process_instruction( - index, - &program_id, - &SVMInstruction::from(&ix) - ), - expected_err + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Ok(expected_details) ); - assert_eq!(compute_budget_instruction_details, expected_details); - // irrelevant instruction makes no change - index += 1; - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ComputeBudgetInstruction::set_compute_unit_price(0), + ComputeBudgetInstruction::set_compute_unit_price(u64::MAX), + ]); + assert_eq!( + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Err(TransactionError::DuplicateInstruction(2)) ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions += 1; - assert_eq!(compute_budget_instruction_details, expected_details); } #[test] - fn test_process_instruction_loaded_accounts_data_size_limit() { - let mut index = 0; - let mut expected_details = ComputeBudgetInstructionDetails::default(); - let mut compute_budget_instruction_details = ComputeBudgetInstructionDetails::default(); - - // irrelevant instruction makes no change - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), - ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions = 1; - assert_eq!(compute_budget_instruction_details, expected_details); - 
- // valid instruction, - index += 1; - let (program_id, ix) = setup_test_instruction( - index, + fn test_try_from_loaded_accounts_data_size_limit() { + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), ComputeBudgetInstruction::set_loaded_accounts_data_size_limit(u32::MAX), - ); - expected_details.requested_loaded_accounts_data_size_limit = Some((index, u32::MAX)); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - assert_eq!(compute_budget_instruction_details, expected_details); - - // duplicate instruction results error - index += 1; - let expected_err = Err(TransactionError::DuplicateInstruction(index)); - let (program_id, ix) = setup_test_instruction( - index, - ComputeBudgetInstruction::set_loaded_accounts_data_size_limit(0), - ); + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ]); + let expected_details = ComputeBudgetInstructionDetails { + requested_loaded_accounts_data_size_limit: Some((1, u32::MAX)), + num_non_compute_budget_instructions: 2, + ..ComputeBudgetInstructionDetails::default() + }; assert_eq!( - compute_budget_instruction_details.process_instruction( - index, - &program_id, - &SVMInstruction::from(&ix) - ), - expected_err + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Ok(expected_details) ); - assert_eq!(compute_budget_instruction_details, expected_details); - // irrelevant instruction makes no change - index += 1; - let (program_id, ix) = setup_test_instruction( - index, - Instruction::new_with_bincode(Pubkey::new_unique(), &0_u8, vec![]), + let tx = build_sanitized_transaction(&[ + Instruction::new_with_bincode(Pubkey::new_unique(), &(), vec![]), + ComputeBudgetInstruction::set_loaded_accounts_data_size_limit(0), + ComputeBudgetInstruction::set_loaded_accounts_data_size_limit(u32::MAX), + ]); + assert_eq!( + ComputeBudgetInstructionDetails::try_from(SVMMessage::program_instructions_iter(&tx)), + Err(TransactionError::DuplicateInstruction(2)) ); - assert!(compute_budget_instruction_details - .process_instruction(index, &program_id, &SVMInstruction::from(&ix)) - .is_ok()); - expected_details.num_non_compute_budget_instructions += 1; - assert_eq!(compute_budget_instruction_details, expected_details); } #[test] diff --git a/runtime-transaction/src/compute_budget_program_id_filter.rs b/runtime-transaction/src/compute_budget_program_id_filter.rs new file mode 100644 index 00000000000000..b89b67113de105 --- /dev/null +++ b/runtime-transaction/src/compute_budget_program_id_filter.rs @@ -0,0 +1,37 @@ +// static account keys has max +use { + agave_transaction_view::static_account_keys_meta::MAX_STATIC_ACCOUNTS_PER_PACKET as FILTER_SIZE, + solana_builtins_default_costs::MAYBE_BUILTIN_KEY, solana_sdk::pubkey::Pubkey, +}; + +pub(crate) struct ComputeBudgetProgramIdFilter { + // array of slots for all possible static and sanitized program_id_index, + // each slot indicates if a program_id_index has not been checked (eg, None), + // or already checked with result (eg, Some(result)) that can be reused. 
+    flags: [Option<bool>; FILTER_SIZE as usize],
+}
+
+impl ComputeBudgetProgramIdFilter {
+    pub(crate) fn new() -> Self {
+        ComputeBudgetProgramIdFilter {
+            flags: [None; FILTER_SIZE as usize],
+        }
+    }
+
+    #[inline]
+    pub(crate) fn is_compute_budget_program(&mut self, index: usize, program_id: &Pubkey) -> bool {
+        *self
+            .flags
+            .get_mut(index)
+            .expect("program id index is sanitized")
+            .get_or_insert_with(|| Self::check_program_id(program_id))
+    }
+
+    #[inline]
+    fn check_program_id(program_id: &Pubkey) -> bool {
+        if !MAYBE_BUILTIN_KEY[program_id.as_ref()[0] as usize] {
+            return false;
+        }
+        solana_sdk::compute_budget::check_id(program_id)
+    }
+}
diff --git a/runtime-transaction/src/lib.rs b/runtime-transaction/src/lib.rs
index 011df606d59cf3..28d54b4eb3b6b8 100644
--- a/runtime-transaction/src/lib.rs
+++ b/runtime-transaction/src/lib.rs
@@ -2,6 +2,7 @@
 #![allow(clippy::arithmetic_side_effects)]
 mod compute_budget_instruction_details;
+mod compute_budget_program_id_filter;
 pub mod instructions_processor;
 pub mod runtime_transaction;
 pub mod transaction_meta;
diff --git a/runtime-transaction/src/runtime_transaction.rs b/runtime-transaction/src/runtime_transaction.rs
index 9c2f75a0868755..2a8772ce168977 100644
--- a/runtime-transaction/src/runtime_transaction.rs
+++ b/runtime-transaction/src/runtime_transaction.rs
@@ -17,20 +17,18 @@ use {
     solana_compute_budget::compute_budget_limits::ComputeBudgetLimits,
     solana_sdk::{
         hash::Hash,
-        message::{AddressLoader, SanitizedMessage, SanitizedVersionedMessage},
+        message::AddressLoader,
         pubkey::Pubkey,
-        signature::Signature,
         simple_vote_transaction_checker::is_simple_vote_transaction,
-        transaction::{Result, SanitizedVersionedTransaction},
+        transaction::{Result, SanitizedTransaction, SanitizedVersionedTransaction},
     },
     solana_svm_transaction::instruction::SVMInstruction,
     std::collections::HashSet,
 };
 
 #[derive(Debug, Clone, Eq, PartialEq)]
-pub struct RuntimeTransaction<M> {
-    signatures: Vec<Signature>,
-    message: M,
+pub struct RuntimeTransaction<T> {
+    transaction: T,
     // transaction meta is a collection of fields, it is updated
     // during message state transition
     meta: TransactionMeta,
@@ -44,11 +42,11 @@ trait DynamicMetaAccess: StaticMetaAccess {}
 
 // Implement the gate traits for the message types that should
 // have access to the static and dynamic metadata.
-impl StaticMetaAccess for SanitizedVersionedMessage {}
-impl StaticMetaAccess for SanitizedMessage {}
-impl DynamicMetaAccess for SanitizedMessage {}
+impl StaticMetaAccess for SanitizedVersionedTransaction {}
+impl StaticMetaAccess for SanitizedTransaction {}
+impl DynamicMetaAccess for SanitizedTransaction {}
 
-impl<M: StaticMetaAccess> StaticMeta for RuntimeTransaction<M> {
+impl<T: StaticMetaAccess> StaticMeta for RuntimeTransaction<T> {
     fn message_hash(&self) -> &Hash {
         &self.meta.message_hash
     }
@@ -68,7 +66,7 @@ impl<M: StaticMetaAccess> StaticMeta for RuntimeTransaction<M> {
 
 impl<M: DynamicMetaAccess> DynamicMeta for RuntimeTransaction<M> {}
 
-impl RuntimeTransaction<SanitizedVersionedMessage> {
+impl RuntimeTransaction<SanitizedVersionedTransaction> {
     pub fn try_from(
         sanitized_versioned_tx: SanitizedVersionedTransaction,
         message_hash: Option<Hash>,
@@ -80,8 +78,9 @@ impl RuntimeTransaction<SanitizedVersionedMessage> {
                 .unwrap_or_else(|| is_simple_vote_transaction(&sanitized_versioned_tx)),
         );
 
-        let (signatures, message) = sanitized_versioned_tx.destruct();
-        meta.set_message_hash(message_hash.unwrap_or_else(|| message.message.hash()));
+        meta.set_message_hash(
+            message_hash.unwrap_or_else(|| sanitized_versioned_tx.get_message().message.hash()),
+        );
 
         let ComputeBudgetLimits {
             compute_unit_limit,
@@ -89,7 +88,8 @@ impl RuntimeTransaction<SanitizedVersionedMessage> {
             loaded_accounts_bytes,
             ..
        } = process_compute_budget_instructions(
-            message
+            sanitized_versioned_tx
+                .get_message()
                .program_instructions_iter()
                .map(|(program_id, ix)| (program_id, SVMInstruction::from(ix))),
        )?;
@@ -98,26 +98,30 @@ impl RuntimeTransaction<SanitizedVersionedMessage> {
         meta.set_loaded_accounts_bytes(loaded_accounts_bytes.get());
 
         Ok(Self {
-            signatures,
-            message,
+            transaction: sanitized_versioned_tx,
             meta,
         })
     }
 }
 
-impl RuntimeTransaction<SanitizedMessage> {
+impl RuntimeTransaction<SanitizedTransaction> {
     pub fn try_from(
-        statically_loaded_runtime_tx: RuntimeTransaction<SanitizedVersionedMessage>,
+        statically_loaded_runtime_tx: RuntimeTransaction<SanitizedVersionedTransaction>,
         address_loader: impl AddressLoader,
         reserved_account_keys: &HashSet<Pubkey>,
     ) -> Result<Self> {
+        let hash = *statically_loaded_runtime_tx.message_hash();
+        let is_simple_vote_tx = statically_loaded_runtime_tx.is_simple_vote_tx();
+        let sanitized_transaction = SanitizedTransaction::try_new(
+            statically_loaded_runtime_tx.transaction,
+            hash,
+            is_simple_vote_tx,
+            address_loader,
+            reserved_account_keys,
+        )?;
+
         let mut tx = Self {
-            signatures: statically_loaded_runtime_tx.signatures,
-            message: SanitizedMessage::try_new(
-                statically_loaded_runtime_tx.message,
-                address_loader,
-                reserved_account_keys,
-            )?,
+            transaction: sanitized_transaction,
             meta: statically_loaded_runtime_tx.meta,
         };
         tx.load_dynamic_metadata()?;
@@ -222,7 +226,7 @@ mod tests {
         svt: SanitizedVersionedTransaction,
         is_simple_vote: Option<bool>,
     ) -> bool {
-        RuntimeTransaction::<SanitizedVersionedMessage>::try_from(svt, None, is_simple_vote)
+        RuntimeTransaction::<SanitizedVersionedTransaction>::try_from(svt, None, is_simple_vote)
             .unwrap()
             .meta
             .is_simple_vote_tx
@@ -254,7 +258,7 @@ mod tests {
         let hash = Hash::new_unique();
 
         let statically_loaded_transaction =
-            RuntimeTransaction::<SanitizedVersionedMessage>::try_from(
+            RuntimeTransaction::<SanitizedVersionedTransaction>::try_from(
                 non_vote_sanitized_versioned_transaction(),
                 Some(hash),
                 None,
@@ -264,7 +268,7 @@ mod tests {
         assert_eq!(hash, *statically_loaded_transaction.message_hash());
         assert!(!statically_loaded_transaction.is_simple_vote_tx());
 
-        let dynamically_loaded_transaction = RuntimeTransaction::<SanitizedMessage>::try_from(
+        let dynamically_loaded_transaction = RuntimeTransaction::<SanitizedTransaction>::try_from(
            statically_loaded_transaction,
            SimpleAddressLoader::Disabled,
            &ReservedAccountKeys::empty_key_set(),
@@ -284,16 +288,17 @@ mod tests {
         let loaded_accounts_bytes = 1_024;
         let mut test_transaction = TestTransaction::new();
 
-        let runtime_transaction_static = RuntimeTransaction::<SanitizedVersionedMessage>::try_from(
-            test_transaction
-                .add_compute_unit_limit(compute_unit_limit)
-                .add_compute_unit_price(compute_unit_price)
-                .add_loaded_accounts_bytes(loaded_accounts_bytes)
-                .to_sanitized_versioned_transaction(),
-            Some(hash),
-            None,
-        )
-        .unwrap();
+        let runtime_transaction_static =
+            RuntimeTransaction::<SanitizedVersionedTransaction>::try_from(
+                test_transaction
+                    .add_compute_unit_limit(compute_unit_limit)
+                    .add_compute_unit_price(compute_unit_price)
+                    .add_loaded_accounts_bytes(loaded_accounts_bytes)
+                    .to_sanitized_versioned_transaction(),
+                Some(hash),
+                None,
+            )
+            .unwrap();
 
         assert_eq!(&hash, runtime_transaction_static.message_hash());
         assert!(!runtime_transaction_static.is_simple_vote_tx());
diff --git a/runtime/src/bank.rs b/runtime/src/bank.rs
index 0a0bacea0eef1c..f54e5f46793c64 100644
--- a/runtime/src/bank.rs
+++ b/runtime/src/bank.rs
@@ -5215,11 +5215,10 @@ impl Bank {
             self.last_blockhash().as_ref(),
         ]);
 
-        let epoch_accounts_hash = self.should_include_epoch_accounts_hash().then(|| {
-            let epoch_accounts_hash = self.wait_get_epoch_accounts_hash();
+        let epoch_accounts_hash = self.wait_get_epoch_accounts_hash();
+        if let Some(epoch_accounts_hash) = epoch_accounts_hash {
             hash = hashv(&[hash.as_ref(), epoch_accounts_hash.as_ref().as_ref()]);
-            epoch_accounts_hash
-        });
+        };
 
         let buf = self
             .hard_forks
@@ -5264,9 +5263,13 @@ impl Bank {
         self.parent_slot() < stop_slot && self.slot() >= stop_slot
     }
 
-    /// If the epoch accounts hash should be included in this Bank, then fetch it.  If the EAH
+    /// If the epoch accounts hash should be included in this Bank, then fetch it. If the EAH
     /// calculation has not completed yet, this fn will block until it does complete.
-    fn wait_get_epoch_accounts_hash(&self) -> EpochAccountsHash {
+    fn wait_get_epoch_accounts_hash(&self) -> Option<EpochAccountsHash> {
+        if !self.should_include_epoch_accounts_hash() {
+            return None;
+        }
+
         let (epoch_accounts_hash, waiting_time_us) = measure_us!(self
             .rc
             .accounts
@@ -5279,7 +5282,7 @@ impl Bank {
             ("slot", self.slot(), i64),
             ("waiting-time-us", waiting_time_us, i64),
         );
-        epoch_accounts_hash
+        Some(epoch_accounts_hash)
     }
 
     /// Used by ledger tool to run a final hash calculation once all ledger replay has completed.
diff --git a/runtime/src/bank/address_lookup_table.rs b/runtime/src/bank/address_lookup_table.rs
index 344f1e8bdf09aa..4fa4e2bc0f570a 100644
--- a/runtime/src/bank/address_lookup_table.rs
+++ b/runtime/src/bank/address_lookup_table.rs
@@ -8,6 +8,7 @@ use {
         },
         transaction::AddressLoader,
     },
+    solana_svm_transaction::message_address_table_lookup::SVMMessageAddressTableLookup,
 };
 
 fn into_address_loader_error(err: AddressLookupError) -> AddressLoaderError {
@@ -25,6 +26,20 @@ impl AddressLoader for &Bank {
     fn load_addresses(
         self,
         address_table_lookups: &[MessageAddressTableLookup],
+    ) -> Result<LoadedAddresses, AddressLoaderError> {
+        self.load_addresses_from_ref(
+            address_table_lookups
+                .iter()
+                .map(SVMMessageAddressTableLookup::from),
+        )
+    }
+}
+
+impl Bank {
+    /// Load addresses from an iterator of `SVMMessageAddressTableLookup`.
+    pub fn load_addresses_from_ref<'a>(
+        &self,
+        address_table_lookups: impl Iterator<Item = SVMMessageAddressTableLookup<'a>>,
     ) -> Result<LoadedAddresses, AddressLoaderError> {
         let slot_hashes = self
             .transaction_processor
@@ -33,7 +48,6 @@ impl AddressLoader for &Bank {
             .map_err(|_| AddressLoaderError::SlotHashesSysvarNotFound)?;
 
         address_table_lookups
-            .iter()
             .map(|address_table_lookup| {
                 self.rc
                     .accounts
diff --git a/runtime/src/bank/bank_hash_details.rs b/runtime/src/bank/bank_hash_details.rs
index 5ab13d85c4d89b..ba341a52044f8c 100644
--- a/runtime/src/bank/bank_hash_details.rs
+++ b/runtime/src/bank/bank_hash_details.rs
@@ -104,66 +104,30 @@ impl From<CommitTransactionDetails> for TransactionCommitDetails {
     }
 }
 
-/// The components that go into a bank hash calculation for a single bank/slot.
 #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize, Default)]
 pub struct SlotDetails {
     pub slot: Slot,
     pub bank_hash: String,
-    #[serde(skip_serializing_if = "String::is_empty")]
-    #[serde(default)]
+    #[serde(skip_serializing_if = "Option::is_none", default, flatten)]
+    pub bank_hash_components: Option<BankHashComponents>,
+    #[serde(skip_serializing_if = "Vec::is_empty", default)]
+    pub transactions: Vec<TransactionDetails>,
+}
+
+/// The components that go into a bank hash calculation for a single bank
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize, Default)]
+pub struct BankHashComponents {
     pub parent_bank_hash: String,
-    #[serde(skip_serializing_if = "String::is_empty")]
-    #[serde(default)]
     pub accounts_delta_hash: String,
-    #[serde(skip_serializing_if = "u64_is_zero")]
-    #[serde(default)]
     pub signature_count: u64,
-    #[serde(skip_serializing_if = "String::is_empty")]
-    #[serde(default)]
     pub last_blockhash: String,
-    #[serde(skip_serializing_if = "accounts_is_empty")]
-    #[serde(default)]
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub epoch_accounts_hash: Option<String>,
     pub accounts: AccountsDetails,
-    #[serde(skip_serializing_if = "Vec::is_empty")]
-    #[serde(default)]
-    pub transactions: Vec<TransactionDetails>,
-}
-
-fn u64_is_zero(val: &u64) -> bool {
-    *val == 0
-}
-
-fn accounts_is_empty(accounts: &AccountsDetails) -> bool {
-    accounts.accounts.is_empty()
 }
 
 impl SlotDetails {
-    pub fn new(
-        slot: Slot,
-        bank_hash: Hash,
-        parent_bank_hash: Hash,
-        accounts_delta_hash: Hash,
-        signature_count: u64,
-        last_blockhash: Hash,
-        accounts: AccountsDetails,
-    ) -> Self {
-        Self {
-            slot,
-            bank_hash: bank_hash.to_string(),
-            parent_bank_hash: parent_bank_hash.to_string(),
-            accounts_delta_hash: accounts_delta_hash.to_string(),
-            signature_count,
-            last_blockhash: last_blockhash.to_string(),
-            accounts,
-            transactions: Vec::new(),
-        }
-    }
-}
-
-impl TryFrom<&Bank> for SlotDetails {
-    type Error = String;
-
-    fn try_from(bank: &Bank) -> Result<Self, Self::Error> {
+    pub fn new_from_bank(bank: &Bank, include_bank_hash_components: bool) -> Result<Self, String> {
         let slot = bank.slot();
         if !bank.is_frozen() {
             return Err(format!(
@@ -171,26 +135,38 @@ impl TryFrom<&Bank> for SlotDetails {
             ));
         }
 
-        // This bank is frozen; as a result, we know that the state has been
-        // hashed which means the delta hash is Some(). So, .unwrap() is safe
-        let AccountsDeltaHash(accounts_delta_hash) = bank
-            .rc
-            .accounts
-            .accounts_db
-            .get_accounts_delta_hash(slot)
-            .unwrap();
-
-        let accounts = bank.get_accounts_for_bank_hash_details();
+        let bank_hash_components = if include_bank_hash_components {
+            // This bank is frozen; as a result, we know that the state has been
+            // hashed which means the delta hash is Some().
So, .unwrap() is safe + let AccountsDeltaHash(accounts_delta_hash) = bank + .rc + .accounts + .accounts_db + .get_accounts_delta_hash(slot) + .unwrap(); + let accounts = bank.get_accounts_for_bank_hash_details(); + + Some(BankHashComponents { + parent_bank_hash: bank.parent_hash().to_string(), + accounts_delta_hash: accounts_delta_hash.to_string(), + signature_count: bank.signature_count(), + last_blockhash: bank.last_blockhash().to_string(), + // The bank is already frozen so this should not have to wait + epoch_accounts_hash: bank + .wait_get_epoch_accounts_hash() + .map(|hash| hash.as_ref().to_string()), + accounts: AccountsDetails { accounts }, + }) + } else { + None + }; - Ok(Self::new( + Ok(Self { slot, - bank.hash(), - bank.parent_hash(), - accounts_delta_hash, - bank.signature_count(), - bank.last_blockhash(), - AccountsDetails { accounts }, - )) + bank_hash: bank.hash().to_string(), + bank_hash_components, + transactions: Vec::new(), + }) } } @@ -291,7 +267,7 @@ impl<'de> Deserialize<'de> for AccountsDetails { /// Output the components that comprise the overall bank hash for the supplied `Bank` pub fn write_bank_hash_details_file(bank: &Bank) -> std::result::Result<(), String> { - let slot_details = SlotDetails::try_from(bank)?; + let slot_details = SlotDetails::new_from_bank(bank, /*include_bank_hash_mixins:*/ true)?; let details = BankHashDetails::new(vec![slot_details]); let parent_dir = bank @@ -328,11 +304,9 @@ pub mod tests { use super::*; fn build_details(num_slots: usize) -> BankHashDetails { - use solana_sdk::hash::{hash, hashv}; - let slot_details: Vec<_> = (0..num_slots) .map(|slot| { - let signature_count = 314; + let slot = slot as u64; let account = AccountSharedData::from(Account { lamports: 123_456_789, @@ -342,7 +316,7 @@ pub mod tests { rent_epoch: 123, }); let account_pubkey = Pubkey::new_unique(); - let account_hash = AccountHash(hash("account".as_bytes())); + let account_hash = AccountHash(solana_sdk::hash::hash("account".as_bytes())); let accounts = AccountsDetails { accounts: vec![PubkeyHashAccount { pubkey: account_pubkey, @@ -351,20 +325,23 @@ pub mod tests { }], }; - let bank_hash = hashv(&["bank".as_bytes(), &slot.to_le_bytes()]); - let parent_bank_hash = hash("parent_bank".as_bytes()); - let accounts_delta_hash = hash("accounts_delta".as_bytes()); - let last_blockhash = hash("last_blockhash".as_bytes()); - - SlotDetails::new( - slot as Slot, - bank_hash, - parent_bank_hash, - accounts_delta_hash, - signature_count, - last_blockhash, - accounts, - ) + SlotDetails { + slot, + bank_hash: format!("bank{slot}"), + bank_hash_components: Some(BankHashComponents { + parent_bank_hash: "parent_bank_hash".into(), + accounts_delta_hash: "accounts_delta_hash".into(), + signature_count: slot + 10, + last_blockhash: "last_blockhash".into(), + epoch_accounts_hash: if slot % 2 == 0 { + Some("epoch_accounts_hash".into()) + } else { + None + }, + accounts, + }), + transactions: vec![], + } }) .collect(); diff --git a/runtime/src/bank/builtin_programs.rs b/runtime/src/bank/builtin_programs.rs index 1944c9ea0bac62..7c12bb23fbd6b0 100644 --- a/runtime/src/bank/builtin_programs.rs +++ b/runtime/src/bank/builtin_programs.rs @@ -75,13 +75,14 @@ mod tests_core_bpf_migration { tests::{create_genesis_config, new_bank_from_parent_with_bank_forks}, Bank, }, + solana_program_runtime::loaded_programs::ProgramCacheEntry, solana_sdk::{ account::{AccountSharedData, ReadableAccount, WritableAccount}, bpf_loader_upgradeable::{self, get_program_data_address, UpgradeableLoaderState}, 
            epoch_schedule::EpochSchedule,
            feature::{self, Feature},
            feature_set::FeatureSet,
-            instruction::Instruction,
+            instruction::{AccountMeta, Instruction},
            message::Message,
            native_loader,
            native_token::LAMPORTS_PER_SOL,
@@ -93,6 +94,27 @@ mod tests_core_bpf_migration {
         test_case::test_case,
     };
 
+    // CPI mockup to test CPI to newly migrated programs.
+    mod cpi_mockup {
+        use {
+            solana_program_runtime::declare_process_instruction,
+            solana_sdk::instruction::Instruction,
+        };
+
+        declare_process_instruction!(Entrypoint, 0, |invoke_context| {
+            let transaction_context = &invoke_context.transaction_context;
+            let instruction_context = transaction_context.get_current_instruction_context()?;
+
+            let target_program_id = transaction_context.get_key_of_account_at_index(
+                instruction_context.get_index_of_instruction_account_in_transaction(0)?,
+            )?;
+
+            let instruction = Instruction::new_with_bytes(*target_program_id, &[], Vec::new());
+
+            invoke_context.native_invoke(instruction.into(), &[])
+        });
+    }
+
     fn test_elf() -> Vec<u8> {
         let mut elf = Vec::new();
         File::open("../programs/bpf_loader/test_elfs/out/noop_aligned.so")
@@ -144,6 +166,16 @@ mod tests_core_bpf_migration {
 
         let mut root_bank = Bank::new_for_tests(&genesis_config);
 
+        // Set up the CPI mockup to test CPI'ing to the migrated program.
+        let cpi_program_id = Pubkey::new_unique();
+        let cpi_program_name = "mock_cpi_program";
+        root_bank.transaction_processor.add_builtin(
+            &root_bank,
+            cpi_program_id,
+            cpi_program_name,
+            ProgramCacheEntry::new_builtin(0, cpi_program_name.len(), cpi_mockup::Entrypoint::vm),
+        );
+
         let (builtin_id, config) = prototype.deconstruct();
         let feature_id = &config.feature_id;
         let source_buffer_address = &config.source_buffer_address;
@@ -219,6 +251,21 @@ mod tests_core_bpf_migration {
         ))
         .unwrap();
 
+        // Successfully invoke the new BPF builtin program via CPI.
+        bank.process_transaction(&Transaction::new(
+            &vec![&mint_keypair],
+            Message::new(
+                &[Instruction::new_with_bytes(
+                    cpi_program_id,
+                    &[],
+                    vec![AccountMeta::new_readonly(*builtin_id, false)],
+                )],
+                Some(&mint_keypair.pubkey()),
+            ),
+            bank.last_blockhash(),
+        ))
+        .unwrap();
+
         // Simulate crossing another epoch boundary for a new bank.
         goto_end_of_slot(bank.clone());
         first_slot_in_next_epoch += slots_per_epoch;
@@ -243,6 +290,21 @@ mod tests_core_bpf_migration {
             bank.last_blockhash(),
         ))
         .unwrap();
+
+        // Again, successfully invoke the new BPF builtin program via CPI.
+        bank.process_transaction(&Transaction::new(
+            &vec![&mint_keypair],
+            Message::new(
+                &[Instruction::new_with_bytes(
+                    cpi_program_id,
+                    &[],
+                    vec![AccountMeta::new_readonly(*builtin_id, false)],
+                )],
+                Some(&mint_keypair.pubkey()),
+            ),
+            bank.last_blockhash(),
+        ))
+        .unwrap();
     }
 
     // Simulate a failure to migrate the program.
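Circling back to the `ComputeBudgetProgramIdFilter` added in `runtime-transaction/src/compute_budget_program_id_filter.rs` above: the struct memoizes, per static program-id index, whether that key is the compute budget program, so a transaction with several instructions from the same program pays for the check only once. The sketch below is illustrative only and not part of this patch; `LazyFlagCache` and its methods are hypothetical stand-ins that reproduce the same `[Option<bool>; N]` plus `get_or_insert_with` shape.

```rust
// Standalone illustration of the lazy per-index memoization used by the new
// ComputeBudgetProgramIdFilter. All names here are hypothetical stand-ins.
const FILTER_SIZE: usize = 38; // mirrors MAX_STATIC_ACCOUNTS_PER_PACKET

struct LazyFlagCache {
    // None = this index has not been checked yet; Some(result) = cached answer.
    flags: [Option<bool>; FILTER_SIZE],
}

impl LazyFlagCache {
    fn new() -> Self {
        Self {
            flags: [None; FILTER_SIZE],
        }
    }

    // Runs `check` at most once per index; later lookups reuse the cached result.
    fn is_match(&mut self, index: usize, check: impl FnOnce() -> bool) -> bool {
        *self.flags[index].get_or_insert_with(check)
    }
}

fn main() {
    let mut cache = LazyFlagCache::new();
    assert!(cache.is_match(3, || true)); // first lookup runs the check
    assert!(cache.is_match(3, || unreachable!("served from the cache")));
}
```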
diff --git a/svm/src/account_loader.rs b/svm/src/account_loader.rs index 5d14c35ec0677c..c7165eca8ff4a5 100644 --- a/svm/src/account_loader.rs +++ b/svm/src/account_loader.rs @@ -1,8 +1,11 @@ use { crate::{ - account_overrides::AccountOverrides, account_rent_state::RentState, nonce_info::NonceInfo, - rollback_accounts::RollbackAccounts, transaction_error_metrics::TransactionErrorMetrics, - transaction_processing_callback::TransactionProcessingCallback, + account_overrides::AccountOverrides, + account_rent_state::RentState, + nonce_info::NonceInfo, + rollback_accounts::RollbackAccounts, + transaction_error_metrics::TransactionErrorMetrics, + transaction_processing_callback::{AccountState, TransactionProcessingCallback}, }, itertools::Itertools, solana_compute_budget::compute_budget_limits::ComputeBudgetLimits, @@ -410,9 +413,11 @@ fn load_transaction_account( loaded_programs: &ProgramCacheForTxBatch, ) -> Result<(LoadedTransactionAccount, bool)> { let mut account_found = true; + let mut was_inspected = false; let is_instruction_account = u8::try_from(account_index) .map(|i| instruction_accounts.contains(&&i)) .unwrap_or(false); + let is_writable = message.is_writable(account_index); let loaded_account = if solana_sdk::sysvar::instructions::check_id(account_key) { // Since the instructions sysvar is constructed by the SVM and modified // for each transaction instruction, it cannot be overridden. @@ -429,7 +434,7 @@ fn load_transaction_account( account: account_override.clone(), rent_collected: 0, } - } else if let Some(program) = (!is_instruction_account && !message.is_writable(account_index)) + } else if let Some(program) = (!is_instruction_account && !is_writable) .then_some(()) .and_then(|_| loaded_programs.find(account_key)) { @@ -447,7 +452,17 @@ fn load_transaction_account( callbacks .get_account_shared_data(account_key) .map(|mut account| { - let rent_collected = if message.is_writable(account_index) { + let rent_collected = if is_writable { + // Inspect the account prior to collecting rent, since + // rent collection can modify the account. 
+                        debug_assert!(!was_inspected);
+                        callbacks.inspect_account(
+                            account_key,
+                            AccountState::Alive(&account),
+                            is_writable,
+                        );
+                        was_inspected = true;
+
                         collect_rent_from_account(
                             feature_set,
                             rent_collector,
@@ -480,6 +495,15 @@ fn load_transaction_account(
             })
     };
 
+    if !was_inspected {
+        let account_state = if account_found {
+            AccountState::Alive(&loaded_account.account)
+        } else {
+            AccountState::Dead
+        };
+        callbacks.inspect_account(account_key, account_state, is_writable);
+    }
+
     Ok((loaded_account, account_found))
 }
 
diff --git a/svm/src/transaction_processing_callback.rs b/svm/src/transaction_processing_callback.rs
index 760a6606568798..34741e9bd7ae3a 100644
--- a/svm/src/transaction_processing_callback.rs
+++ b/svm/src/transaction_processing_callback.rs
@@ -7,4 +7,16 @@ pub trait TransactionProcessingCallback {
     fn get_account_shared_data(&self, pubkey: &Pubkey) -> Option<AccountSharedData>;
 
     fn add_builtin_account(&self, _name: &str, _program_id: &Pubkey) {}
+
+    fn inspect_account(&self, _address: &Pubkey, _account_state: AccountState, _is_writable: bool) {
+    }
+}
+
+/// The state the account is in initially, before transaction processing
+#[derive(Debug)]
+pub enum AccountState<'a> {
+    /// This account is dead, and will be created by this transaction
+    Dead,
+    /// This account is alive, and already existed prior to this transaction
+    Alive(&'a AccountSharedData),
 }
diff --git a/transaction-view/src/lib.rs b/transaction-view/src/lib.rs
index 40cea3da25393c..13c5a43fd4016c 100644
--- a/transaction-view/src/lib.rs
+++ b/transaction-view/src/lib.rs
@@ -5,7 +5,7 @@ pub mod bytes;
 mod bytes;
 
 #[allow(dead_code)]
-mod address_table_lookup_meta;
+pub mod address_table_lookup_meta;
 #[allow(dead_code)]
 mod instructions_meta;
 #[allow(dead_code)]
@@ -14,6 +14,6 @@ pub mod result;
 #[allow(dead_code)]
 mod signature_meta;
 #[allow(dead_code)]
-mod static_account_keys_meta;
+pub mod static_account_keys_meta;
 #[allow(dead_code)]
 pub mod transaction_meta;
diff --git a/transaction-view/src/static_account_keys_meta.rs b/transaction-view/src/static_account_keys_meta.rs
index bea6a3e7394442..bae934863cfa4e 100644
--- a/transaction-view/src/static_account_keys_meta.rs
+++ b/transaction-view/src/static_account_keys_meta.rs
@@ -10,12 +10,12 @@ use {
 // This means the maximum number of 32 byte keys is 38.
 // 38 as an min-sized encoded u16 is 1 byte.
 // We can simply read this byte, if it's >38 we can return None.
-const MAX_STATIC_ACCOUNTS_PER_PACKET: u8 =
+pub const MAX_STATIC_ACCOUNTS_PER_PACKET: u8 =
     (PACKET_DATA_SIZE / core::mem::size_of::<Pubkey>()) as u8;
 
 /// Contains meta-data about the static account keys in a transaction packet.
 #[derive(Default)]
-pub struct StaticAccountKeysMeta {
+pub(crate) struct StaticAccountKeysMeta {
     /// The number of static accounts in the transaction.
     pub(crate) num_static_accounts: u8,
     /// The offset to the first static account in the transaction.
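The `inspect_account` hook added above gives a `TransactionProcessingCallback` implementor one look at each loaded account's pre-transaction state (alive with its current data, or dead), taken before rent collection can mutate a writable account. The sketch below shows one way a consumer might record those observations; it uses simplified stand-ins rather than the real svm trait (which has required methods not shown in this diff), so every name in it is illustrative.

```rust
use std::{cell::RefCell, collections::HashMap};

// Simplified stand-ins for AccountState<'_> and the callback trait.
#[derive(Debug, Clone, PartialEq)]
enum AccountState {
    Dead,
    Alive(u64), // stand-in for &AccountSharedData; just lamports here
}

trait InspectCallback {
    // Mirrors the default no-op hook added to TransactionProcessingCallback.
    fn inspect_account(&self, _address: &str, _state: AccountState, _is_writable: bool) {}
}

#[derive(Default)]
struct RecordingCallback {
    // First observed (pre-transaction) state per account.
    seen: RefCell<HashMap<String, (AccountState, bool)>>,
}

impl InspectCallback for RecordingCallback {
    fn inspect_account(&self, address: &str, state: AccountState, is_writable: bool) {
        self.seen
            .borrow_mut()
            .entry(address.to_string())
            .or_insert((state, is_writable));
    }
}

fn main() {
    let callback = RecordingCallback::default();
    // The account loader drives these calls, once per loaded account.
    callback.inspect_account("fee_payer", AccountState::Alive(1_000_000), true);
    callback.inspect_account("brand_new_account", AccountState::Dead, true);
    assert_eq!(callback.seen.borrow().len(), 2);
}
```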
diff --git a/turbine/src/quic_endpoint.rs b/turbine/src/quic_endpoint.rs index c137e1c0a6ef60..d16bb5c188b831 100644 --- a/turbine/src/quic_endpoint.rs +++ b/turbine/src/quic_endpoint.rs @@ -200,7 +200,7 @@ async fn run_server( ) { let stats = Arc::::default(); let report_metrics_task = - tokio::task::spawn(report_metrics_task("repair_quic_server", stats.clone())); + tokio::task::spawn(report_metrics_task("turbine_quic_server", stats.clone())); while let Some(connecting) = endpoint.accept().await { tokio::task::spawn(handle_connecting_task( endpoint.clone(), @@ -227,7 +227,7 @@ async fn run_client( ) { let stats = Arc::::default(); let report_metrics_task = - tokio::task::spawn(report_metrics_task("repair_quic_client", stats.clone())); + tokio::task::spawn(report_metrics_task("turbine_quic_client", stats.clone())); while let Some((remote_address, bytes)) = receiver.recv().await { let Some(bytes) = try_route_bytes(&remote_address, bytes, &*router.read().await, &stats) else { diff --git a/validator/src/cli/thread_args.rs b/validator/src/cli/thread_args.rs index 42115d25ee3b83..1841da54a1e028 100644 --- a/validator/src/cli/thread_args.rs +++ b/validator/src/cli/thread_args.rs @@ -18,10 +18,11 @@ pub struct DefaultThreadArgs { impl Default for DefaultThreadArgs { fn default() -> Self { Self { - ip_echo_server_threads: IpEchoServerThreadsArg::default().to_string(), - replay_forks_threads: ReplayForksThreadsArg::default().to_string(), - replay_transactions_threads: ReplayTransactionsThreadsArg::default().to_string(), - tvu_receive_threads: TvuReceiveThreadsArg::default().to_string(), + ip_echo_server_threads: IpEchoServerThreadsArg::bounded_default().to_string(), + replay_forks_threads: ReplayForksThreadsArg::bounded_default().to_string(), + replay_transactions_threads: ReplayTransactionsThreadsArg::bounded_default() + .to_string(), + tvu_receive_threads: TvuReceiveThreadsArg::bounded_default().to_string(), } } } @@ -85,6 +86,12 @@ trait ThreadArg { /// The default number of threads fn default() -> usize; + /// The default number of threads, bounded by Self::max() + /// This prevents potential CLAP issues on low core count machines where + /// a fixed value in Self::default() could be greater than Self::max() + fn bounded_default() -> usize { + std::cmp::min(Self::default(), Self::max()) + } /// The minimum allowed number of threads (inclusive) fn min() -> usize { 1 diff --git a/validator/src/main.rs b/validator/src/main.rs index 45c35a43ef5392..349f01ecace8d7 100644 --- a/validator/src/main.rs +++ b/validator/src/main.rs @@ -2038,6 +2038,13 @@ pub fn main() { return; } + // Bootstrap code above pushes a contact-info with more recent timestamp to + // gossip. If the node is staked the contact-info lingers in gossip causing + // false duplicate nodes error. + // Below line refreshes the timestamp on contact-info so that it overrides + // the one pushed by bootstrap. + node.info.hot_swap_pubkey(identity_keypair.pubkey()); + let validator = Validator::new( node, identity_keypair,
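A closing note on the `bounded_default` helper introduced in `validator/src/cli/thread_args.rs` above: a fixed default can exceed the core-derived maximum on small machines, and clap would then reject its own default value, so the default is clamped to the maximum. Below is a tiny sketch of the failure mode and the clamp; the numbers and helper names are hypothetical.

```rust
// Hypothetical numbers: an 8-thread default on a 4-core machine.
fn default_threads() -> usize {
    8 // fixed default baked into the arg definition
}

fn max_threads() -> usize {
    4 // e.g. derived from the number of available cores
}

// The clamp applied by bounded_default(): never advertise a default above max.
fn bounded_default() -> usize {
    std::cmp::min(default_threads(), max_threads())
}

fn main() {
    // Without the clamp, the default (8) would fail a 1..=4 range check.
    assert!(default_threads() > max_threads());
    assert_eq!(bounded_default(), 4);
}
```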