From 24177fac262fd403cc198f107ac21ffb67b5d85e Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Mon, 4 Nov 2024 18:12:14 +0700 Subject: [PATCH 01/21] feat: hardcoded identity transfers in strategy tests --- .../tests/strategy_tests/main.rs | 7 +- .../tests/strategy_tests/strategy.rs | 14 ++- .../tests/strategy_tests/voting_tests.rs | 90 +++++++++++-------- packages/strategy-tests/src/lib.rs | 71 ++++++++++----- packages/strategy-tests/src/operations.rs | 19 +++- packages/strategy-tests/src/transitions.rs | 2 +- 6 files changed, 137 insertions(+), 66 deletions(-) diff --git a/packages/rs-drive-abci/tests/strategy_tests/main.rs b/packages/rs-drive-abci/tests/strategy_tests/main.rs index 2312241cc6..f2122d627e 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/main.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/main.rs @@ -2602,7 +2602,10 @@ mod tests { &simple_signer, &mut rng, platform_version, - ); + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let strategy = NetworkStrategy { strategy: Strategy { @@ -3910,7 +3913,7 @@ mod tests { strategy: Strategy { start_contracts: vec![], operations: vec![Operation { - op_type: OperationType::IdentityTransfer, + op_type: OperationType::IdentityTransfer(None), frequency: Frequency { times_per_block_range: 1..3, chance_per_block: None, diff --git a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs index 667b846868..4d1a7ccb62 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs @@ -41,7 +41,7 @@ use drive_abci::rpc::core::MockCoreRPCLike; use rand::prelude::{IteratorRandom, SliceRandom, StdRng}; use rand::Rng; use strategy_tests::Strategy; -use strategy_tests::transitions::{create_state_transitions_for_identities, create_state_transitions_for_identities_and_proofs, instant_asset_lock_proof_fixture, instant_asset_lock_proof_fixture_with_dynamic_range}; +use strategy_tests::transitions::{create_state_transitions_for_identities, create_state_transitions_for_identities_and_proofs, instant_asset_lock_proof_fixture_with_dynamic_range}; use std::borrow::Cow; use std::collections::{BTreeMap, HashMap, HashSet}; use std::ops::RangeInclusive; @@ -405,7 +405,15 @@ impl NetworkStrategy { state_transitions.append(&mut new_transitions); } if !self.strategy.start_identities.hard_coded.is_empty() { - state_transitions.extend(self.strategy.start_identities.hard_coded.clone()); + state_transitions.extend( + self.strategy.start_identities.hard_coded.iter().filter_map( + |(identity, transition)| { + transition.as_ref().map(|create_transition| { + (identity.clone(), create_transition.clone()) + }) + }, + ), + ); } } let frequency = &self.strategy.identity_inserts.frequency; @@ -1196,7 +1204,7 @@ impl NetworkStrategy { operations.push(state_transition); } } - OperationType::IdentityTransfer if current_identities.len() > 1 => { + OperationType::IdentityTransfer(_) if current_identities.len() > 1 => { let identities_clone = current_identities.clone(); // Sender is the first in the list, which should be loaded_identity diff --git a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs index e14f8d7b1b..2264a4cd5f 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs @@ -79,13 +79,17 @@ mod tests { 
simple_signer.add_keys(keys1); - let start_identities = create_state_transitions_for_identities( - vec![identity1], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -363,13 +367,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -635,13 +643,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -988,13 +1000,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -1353,13 +1369,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive diff --git a/packages/strategy-tests/src/lib.rs b/packages/strategy-tests/src/lib.rs index 61395d99f2..65e8a51f85 100644 --- a/packages/strategy-tests/src/lib.rs +++ b/packages/strategy-tests/src/lib.rs @@ -44,6 +44,7 @@ use platform_version::TryFromPlatformVersioned; use rand::prelude::StdRng; use rand::seq::{IteratorRandom, SliceRandom}; use rand::Rng; +use transitions::create_identity_credit_transfer_transition; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::ops::RangeInclusive; 
use bincode::{Decode, Encode}; @@ -146,7 +147,7 @@ pub struct StartIdentities { pub keys_per_identity: u8, pub starting_balances: u64, // starting balance in duffs pub extra_keys: KeyMaps, - pub hard_coded: Vec<(Identity, StateTransition)>, + pub hard_coded: Vec<(Identity, Option)>, } /// Identities to register on the first block of the strategy @@ -1287,38 +1288,66 @@ impl Strategy { } // Generate state transition for identity transfer operation - OperationType::IdentityTransfer if current_identities.len() > 1 => { + OperationType::IdentityTransfer(identity_transfer_info) => { for _ in 0..count { - let identities_count = current_identities.len(); - if identities_count == 0 { - break; - } + if let Some(transfer_info) = identity_transfer_info { + let sender = self + .start_identities + .hard_coded + .iter() + .find(|(identity, _)| identity.id() == transfer_info.from) + .expect( + "Expected to find sender identity in hardcoded start identities", + ); + let recipient = self + .start_identities + .hard_coded + .iter() + .find(|(identity, _)| identity.id() == transfer_info.to) + .expect( + "Expected to find recipient identity in hardcoded start identities", + ); + + let state_transition = create_identity_credit_transfer_transition( + &sender.0, + &recipient.0, + identity_nonce_counter, + signer, // Does this mean the loaded identity must be the sender since we're signing with it? + transfer_info.amount, + ); + operations.push(state_transition); + } else if current_identities.len() > 1 { + let identities_count = current_identities.len(); + if identities_count == 0 { + break; + } - // Select a random identity from the current_identities for the sender - let random_index_sender = rng.gen_range(0..identities_count); + // Select a random identity from the current_identities for the sender + let random_index_sender = rng.gen_range(0..identities_count); - // Clone current_identities to a Vec for manipulation - let mut unused_identities: Vec<_> = - current_identities.iter().cloned().collect(); - unused_identities.remove(random_index_sender); // Remove the sender - let unused_identities_count = unused_identities.len(); + // Clone current_identities to a Vec for manipulation + let mut unused_identities: Vec<_> = + current_identities.iter().cloned().collect(); + unused_identities.remove(random_index_sender); // Remove the sender + let unused_identities_count = unused_identities.len(); - // Select a random identity from the remaining ones for the recipient - let random_index_recipient = rng.gen_range(0..unused_identities_count); - let recipient = &unused_identities[random_index_recipient]; + // Select a random identity from the remaining ones for the recipient + let random_index_recipient = + rng.gen_range(0..unused_identities_count); + let recipient = &unused_identities[random_index_recipient]; - // Use the sender index on the original slice - let sender = &mut current_identities[random_index_sender]; + // Use the sender index on the original slice + let sender = &mut current_identities[random_index_sender]; - let state_transition = - crate::transitions::create_identity_credit_transfer_transition( + let state_transition = create_identity_credit_transfer_transition( sender, recipient, identity_nonce_counter, signer, 300000, ); - operations.push(state_transition); + operations.push(state_transition); + } } } diff --git a/packages/strategy-tests/src/operations.rs b/packages/strategy-tests/src/operations.rs index 675e996843..d35fc9f503 100644 --- a/packages/strategy-tests/src/operations.rs +++ 
b/packages/strategy-tests/src/operations.rs
@@ -497,6 +497,13 @@ impl VoteAction {
 
 pub type AmountRange = RangeInclusive<Credits>;
 
+#[derive(Clone, Debug, PartialEq, Encode, Decode)]
+pub struct IdentityTransferInfo {
+    pub from: Identifier,
+    pub to: Identifier,
+    pub amount: Credits,
+}
+
 #[derive(Clone, Debug, PartialEq)]
 pub enum OperationType {
     Document(DocumentOp),
@@ -505,7 +512,7 @@ pub enum OperationType {
     IdentityWithdrawal(AmountRange),
     ContractCreate(RandomDocumentTypeParameters, DocumentTypeCount),
     ContractUpdate(DataContractUpdateOp),
-    IdentityTransfer,
+    IdentityTransfer(Option<IdentityTransferInfo>),
     ResourceVote(ResourceVoteOp),
 }
 
@@ -517,7 +524,7 @@ enum OperationTypeInSerializationFormat {
     IdentityWithdrawal(AmountRange),
     ContractCreate(RandomDocumentTypeParameters, DocumentTypeCount),
     ContractUpdate(Vec<u8>),
-    IdentityTransfer,
+    IdentityTransfer(Option<IdentityTransferInfo>),
     ResourceVote(ResourceVoteOpSerializable),
 }
 
@@ -563,7 +570,9 @@ impl PlatformSerializableWithPlatformVersion for OperationType {
                     contract_op_in_serialization_format,
                 )
             }
-            OperationType::IdentityTransfer => OperationTypeInSerializationFormat::IdentityTransfer,
+            OperationType::IdentityTransfer(identity_transfer_info) => {
+                OperationTypeInSerializationFormat::IdentityTransfer(identity_transfer_info)
+            }
             OperationType::ResourceVote(resource_vote_op) => {
                 let vote_op_in_serialization_format =
                     resource_vote_op.try_into_platform_versioned(platform_version)?;
@@ -626,7 +635,9 @@ impl PlatformDeserializableWithPotentialValidationFromVersionedStructure for Ope
                 )?;
                 OperationType::ContractUpdate(update_op)
             }
-            OperationTypeInSerializationFormat::IdentityTransfer => OperationType::IdentityTransfer,
+            OperationTypeInSerializationFormat::IdentityTransfer(identity_transfer_info) => {
+                OperationType::IdentityTransfer(identity_transfer_info)
+            }
             OperationTypeInSerializationFormat::ResourceVote(resource_vote_op) => {
                 let vote_op = resource_vote_op.try_into_platform_versioned(platform_version)?;
                 OperationType::ResourceVote(vote_op)
diff --git a/packages/strategy-tests/src/transitions.rs b/packages/strategy-tests/src/transitions.rs
index 85d03eb333..c77b51e290 100644
--- a/packages/strategy-tests/src/transitions.rs
+++ b/packages/strategy-tests/src/transitions.rs
@@ -802,7 +802,7 @@ pub fn create_identity_withdrawal_transition_with_output_address(
 /// - If the sender's identity does not have a suitable authentication key available for signing.
 /// - If there's an error during the signing process.
pub fn create_identity_credit_transfer_transition( - identity: &mut Identity, + identity: &Identity, recipient: &Identity, identity_nonce_counter: &mut BTreeMap, signer: &mut SimpleSigner, From dacc6dbd04b6c4f9593c7c8ab128ac1f5a094e4f Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Mon, 4 Nov 2024 18:30:19 +0700 Subject: [PATCH 02/21] fix(drive): uncommitted state if db transaction fails (#2305) --- .../src/abci/handler/finalize_block.rs | 26 +++++++++- .../rs-drive-abci/src/abci/handler/info.rs | 52 ++++++++++++++++--- .../src/abci/handler/prepare_proposal.rs | 43 +++++++++++++++ .../src/abci/handler/process_proposal.rs | 43 +++++++++++++++ 4 files changed, 157 insertions(+), 7 deletions(-) diff --git a/packages/rs-drive-abci/src/abci/handler/finalize_block.rs b/packages/rs-drive-abci/src/abci/handler/finalize_block.rs index 9653391c7d..852f85cc6b 100644 --- a/packages/rs-drive-abci/src/abci/handler/finalize_block.rs +++ b/packages/rs-drive-abci/src/abci/handler/finalize_block.rs @@ -5,6 +5,7 @@ use crate::execution::types::block_execution_context::v0::BlockExecutionContextV use crate::platform_types::cleaned_abci_messages::finalized_block_cleaned_request::v0::FinalizeBlockCleanedRequest; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::rpc::core::CoreRPCLike; +use dpp::dashcore::Network; use std::sync::atomic::Ordering; use tenderdash_abci::proto::abci as proto; @@ -66,7 +67,30 @@ where )); } - app.commit_transaction(platform_version)?; + let result = app.commit_transaction(platform_version); + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + if app.platform().config.network == Network::Dash + && config.abci.chain_id == "evo1" + && block_height < 33000 + { + // Old behavior on mainnet below block 33000 + result?; + } else { + // In case if transaction commit failed we still have caches in memory that + // corresponds to the data that we weren't able to commit. + // The simplified solution is to restart the Drive, so all caches + // will be restored from the disk and try to process this block again. + // TODO: We need a better handling of the transaction is busy error with retry logic. 
+ result.expect("commit transaction"); + } app.platform() .committed_block_height_guard diff --git a/packages/rs-drive-abci/src/abci/handler/info.rs b/packages/rs-drive-abci/src/abci/handler/info.rs index dbb8501891..9ac9d31626 100644 --- a/packages/rs-drive-abci/src/abci/handler/info.rs +++ b/packages/rs-drive-abci/src/abci/handler/info.rs @@ -3,6 +3,7 @@ use crate::abci::AbciError; use crate::error::Error; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::rpc::core::CoreRPCLike; +use dpp::dashcore::Network; use dpp::version::DESIRED_PLATFORM_VERSION; use tenderdash_abci::proto::abci as proto; @@ -21,19 +22,58 @@ where let platform_state = app.platform().state.load(); - let state_app_hash = platform_state + let last_block_height = platform_state.last_committed_block_height() as i64; + + // Verify that Platform State corresponds to Drive commited state + let platform_state_app_hash = platform_state .last_committed_block_app_hash() - .map(|app_hash| app_hash.to_vec()) .unwrap_or_default(); + let grove_version = &platform_state + .current_platform_version()? + .drive + .grove_version; + + let drive_storage_root_hash = app + .platform() + .drive + .grove + .root_hash(None, grove_version) + .unwrap()?; + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + #[allow(clippy::collapsible_if)] + if !(config.network == Network::Dash + && config.abci.chain_id == "evo1" + && last_block_height < 33000) + { + // App hash in memory must be equal to app hash on disk + if drive_storage_root_hash != platform_state_app_hash { + // We panic because we can't recover from this situation. 
+ // Better to restart the Drive, so we might self-heal the node + // reloading state form the disk + panic!( + "drive and platform state app hash mismatch: drive_storage_root_hash: {:?}, platform_state_app_hash: {:?}", + drive_storage_root_hash, platform_state_app_hash + ); + } + } + let desired_protocol_version = DESIRED_PLATFORM_VERSION.protocol_version; let response = proto::ResponseInfo { data: "".to_string(), app_version: desired_protocol_version as u64, - last_block_height: platform_state.last_committed_block_height() as i64, + last_block_height, version: env!("CARGO_PKG_VERSION").to_string(), - last_block_app_hash: state_app_hash.clone(), + last_block_app_hash: platform_state_app_hash.to_vec(), }; tracing::debug!( @@ -41,8 +81,8 @@ where software_version = env!("CARGO_PKG_VERSION"), block_version = request.block_version, p2p_version = request.p2p_version, - app_hash = hex::encode(state_app_hash), - height = platform_state.last_committed_block_height(), + app_hash = hex::encode(platform_state_app_hash), + last_block_height, "Handshake with consensus engine", ); diff --git a/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs b/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs index 18252d0d45..61f58a0196 100644 --- a/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs +++ b/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs @@ -11,6 +11,7 @@ use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; use crate::rpc::core::CoreRPCLike; use dpp::dashcore::hashes::Hash; +use dpp::dashcore::Network; use dpp::version::TryIntoPlatformVersioned; use drive::grovedb_storage::Error::RocksDBError; use tenderdash_abci::proto::abci as proto; @@ -35,6 +36,48 @@ where let platform_state = app.platform().state.load(); + // Verify that Platform State corresponds to Drive commited state + let platform_state_app_hash = platform_state + .last_committed_block_app_hash() + .unwrap_or_default(); + + let grove_version = &platform_state + .current_platform_version()? + .drive + .grove_version; + + let drive_storage_root_hash = app + .platform() + .drive + .grove + .root_hash(None, grove_version) + .unwrap()?; + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + #[allow(clippy::collapsible_if)] + if !(config.network == Network::Dash + && config.abci.chain_id == "evo1" + && request.height < 33000) + { + // App hash in memory must be equal to app hash on disk + if drive_storage_root_hash != platform_state_app_hash { + // We panic because we can't recover from this situation. 
+ // Better to restart the Drive, so we might self-heal the node + // reloading state form the disk + panic!( + "drive and platform state app hash mismatch: drive_storage_root_hash: {:?}, platform_state_app_hash: {:?}", + drive_storage_root_hash, platform_state_app_hash + ); + } + } + let last_committed_core_height = platform_state.last_committed_core_height(); let starting_platform_version = platform_state.current_platform_version()?; diff --git a/packages/rs-drive-abci/src/abci/handler/process_proposal.rs b/packages/rs-drive-abci/src/abci/handler/process_proposal.rs index 5bf547e14a..d40567d3db 100644 --- a/packages/rs-drive-abci/src/abci/handler/process_proposal.rs +++ b/packages/rs-drive-abci/src/abci/handler/process_proposal.rs @@ -12,6 +12,7 @@ use crate::platform_types::block_execution_outcome; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; use crate::rpc::core::CoreRPCLike; +use dpp::dashcore::Network; use dpp::version::TryIntoPlatformVersioned; use drive::grovedb_storage::Error::RocksDBError; use tenderdash_abci::proto::abci as proto; @@ -179,6 +180,48 @@ where let platform_state = app.platform().state.load(); + // Verify that Platform State corresponds to Drive commited state + let platform_state_app_hash = platform_state + .last_committed_block_app_hash() + .unwrap_or_default(); + + let grove_version = &platform_state + .current_platform_version()? + .drive + .grove_version; + + let drive_storage_root_hash = app + .platform() + .drive + .grove + .root_hash(None, grove_version) + .unwrap()?; + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + #[allow(clippy::collapsible_if)] + if !(app.platform().config.network == Network::Dash + && config.abci.chain_id == "evo1" + && request.height < 33000) + { + // App hash in memory must be equal to app hash on disk + if drive_storage_root_hash != platform_state_app_hash { + // We panic because we can't recover from this situation. 
+ // Better to restart the Drive, so we might self-heal the node + // reloading state form the disk + panic!( + "drive and platform state app hash mismatch: drive_storage_root_hash: {:?}, platform_state_app_hash: {:?}", + drive_storage_root_hash, platform_state_app_hash + ); + } + } + let starting_platform_version = platform_state.current_platform_version()?; // Running the proposal executes all the state transitions for the block From 306b86cb35fce29bec2c94a660c3923bf13bedd6 Mon Sep 17 00:00:00 2001 From: QuantumExplorer Date: Mon, 4 Nov 2024 12:31:37 +0100 Subject: [PATCH 03/21] fix(drive): apply batch is not using transaction in `remove_all_votes_given_by_identities` (#2309) Co-authored-by: Ivan Shumkov --- .../mod.rs | 3 +++ .../v0/mod.rs | 5 ++++ .../voting/run_dao_platform_events/v0/mod.rs | 1 + .../state_transitions/masternode_vote/mod.rs | 1 + .../mod.rs | 8 +++++++ .../v0/mod.rs | 24 +++++++++++++++++-- 6 files changed, 40 insertions(+), 2 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs index 8328eb0fc1..ffd97c4292 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs @@ -3,6 +3,7 @@ use crate::error::Error; use crate::platform_types::platform::Platform; use crate::platform_types::platform_state::PlatformState; use crate::rpc::core::CoreRPCLike; +use dpp::block::block_info::BlockInfo; use dpp::version::PlatformVersion; use drive::grovedb::TransactionArg; @@ -14,6 +15,7 @@ where /// Removes the votes for removed masternodes pub(in crate::execution) fn remove_votes_for_removed_masternodes( &self, + block_info: &BlockInfo, last_committed_platform_state: &PlatformState, block_platform_state: &PlatformState, transaction: TransactionArg, @@ -26,6 +28,7 @@ where .remove_votes_for_removed_masternodes { 0 => self.remove_votes_for_removed_masternodes_v0( + block_info, last_committed_platform_state, block_platform_state, transaction, diff --git a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs index b0081570c7..04931a3928 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs @@ -3,6 +3,7 @@ use crate::platform_types::platform::Platform; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::platform_types::platform_state::PlatformState; use crate::rpc::core::CoreRPCLike; +use dpp::block::block_info::BlockInfo; use dpp::dashcore::hashes::Hash; use dpp::version::PlatformVersion; use drive::grovedb::TransactionArg; @@ -14,6 +15,7 @@ where /// Removes the votes for removed masternodes pub(super) fn remove_votes_for_removed_masternodes_v0( &self, + block_info: &BlockInfo, last_committed_platform_state: &PlatformState, block_platform_state: &PlatformState, transaction: TransactionArg, @@ -29,6 +31,9 @@ where .iter() .map(|pro_tx_hash| pro_tx_hash.as_byte_array().to_vec()) .collect(), + block_info.height, + self.config.network, + self.config.abci.chain_id.as_str(), transaction, 
platform_version, )?; diff --git a/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs index 2ea9357af1..57fbe635b2 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs @@ -21,6 +21,7 @@ where // Remove any votes that self.remove_votes_for_removed_masternodes( + block_info, last_committed_platform_state, block_platform_state, transaction, diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs index fe7d8095b8..9d0394c442 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs @@ -11287,6 +11287,7 @@ mod tests { platform .remove_votes_for_removed_masternodes( + &BlockInfo::default(), &platform_state_before_masternode_identity_removals, &block_platform_state, Some(&transaction), diff --git a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs index 0f5e0d9604..f93d92a424 100644 --- a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs +++ b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs @@ -5,6 +5,8 @@ use crate::drive::Drive; use crate::error::drive::DriveError; use crate::error::Error; +use dpp::dashcore::Network; +use dpp::prelude::BlockHeight; use dpp::version::PlatformVersion; use grovedb::TransactionArg; @@ -14,6 +16,9 @@ impl Drive { pub fn remove_all_votes_given_by_identities( &self, identity_ids_as_byte_arrays: Vec>, + block_height: BlockHeight, + network: Network, + chain_id: &str, transaction: TransactionArg, platform_version: &PlatformVersion, ) -> Result<(), Error> { @@ -26,6 +31,9 @@ impl Drive { { 0 => self.remove_all_votes_given_by_identities_v0( identity_ids_as_byte_arrays, + block_height, + network, + chain_id, transaction, platform_version, ), diff --git a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs index 3c36b0ec64..81b3d0fab7 100644 --- a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs +++ b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs @@ -11,7 +11,8 @@ use crate::drive::votes::paths::{ use crate::drive::votes::storage_form::contested_document_resource_reference_storage_form::ContestedDocumentResourceVoteReferenceStorageForm; use crate::query::QueryItem; use crate::util::grove_operations::BatchDeleteApplyType; -use dpp::prelude::Identifier; +use dpp::dashcore::Network; +use dpp::prelude::{BlockHeight, Identifier}; use dpp::version::PlatformVersion; use grovedb::query_result_type::QueryResultType::QueryPathKeyElementTrioResultType; use grovedb::{PathQuery, Query, SizedQuery, TransactionArg}; @@ -22,6 +23,9 @@ impl Drive { pub(super) fn remove_all_votes_given_by_identities_v0( &self, identity_ids_as_byte_arrays: Vec>, + block_height: 
BlockHeight, + network: Network, + chain_id: &str, transaction: TransactionArg, platform_version: &PlatformVersion, ) -> Result<(), Error> { @@ -112,9 +116,25 @@ impl Drive { } if !deletion_batch.is_empty() { + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state + // and updating the cache (https://github.com/dashpay/platform/pull/2305). + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable this fix at the block when we consider the state is consistent. + let transaction = + if network == Network::Dash && chain_id == "evo1" && block_height < 33000 { + // Old behaviour on mainnet + None + } else { + // We should use transaction + transaction + }; + self.apply_batch_low_level_drive_operations( None, - None, + transaction, deletion_batch, &mut vec![], &platform_version.drive, From 99fe5fa402ddcae329795bb29eaf73fd5044b4f2 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Mon, 4 Nov 2024 23:16:36 +0700 Subject: [PATCH 04/21] add comment --- packages/strategy-tests/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/strategy-tests/src/lib.rs b/packages/strategy-tests/src/lib.rs index 65e8a51f85..c2d81ea990 100644 --- a/packages/strategy-tests/src/lib.rs +++ b/packages/strategy-tests/src/lib.rs @@ -1312,7 +1312,7 @@ impl Strategy { &sender.0, &recipient.0, identity_nonce_counter, - signer, // Does this mean the loaded identity must be the sender since we're signing with it? + signer, // This means the TUI loaded identity must always be the sender since we're always signing with it for now transfer_info.amount, ); operations.push(state_transition); From 0d3e091a5591fde03dbfd8e501864b5ffa2e3602 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 13:07:49 +0700 Subject: [PATCH 05/21] comment --- packages/rs-drive-abci/tests/strategy_tests/strategy.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs index 4d1a7ccb62..bf3235ea78 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs @@ -404,6 +404,8 @@ impl NetworkStrategy { ); state_transitions.append(&mut new_transitions); } + // Extend the state transitions with the strategy's hard coded start identities + // Filtering out the ones that have no create transition if !self.strategy.start_identities.hard_coded.is_empty() { state_transitions.extend( self.strategy.start_identities.hard_coded.iter().filter_map( From e4215145ad7889300810472c5a91f15ae987c1eb Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 13:16:38 +0700 Subject: [PATCH 06/21] use into_iter instead of iter --- .../tests/strategy_tests/main.rs | 4 ++-- .../tests/strategy_tests/voting_tests.rs | 20 +++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/rs-drive-abci/tests/strategy_tests/main.rs b/packages/rs-drive-abci/tests/strategy_tests/main.rs index f2122d627e..03bb92bc1a 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/main.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/main.rs @@ -2603,8 +2603,8 @@ mod tests { &mut rng, platform_version, ) - 
.iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let strategy = NetworkStrategy { diff --git a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs index 2264a4cd5f..83834520c0 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs @@ -87,8 +87,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -375,8 +375,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -651,8 +651,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -1008,8 +1008,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -1377,8 +1377,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform From 3d941ec5ab7f947e53d41c68eb5758ff2e0bd074 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 13:31:07 +0700 Subject: [PATCH 07/21] use current identities instead of hardcoded start identities --- packages/strategy-tests/src/lib.rs | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/packages/strategy-tests/src/lib.rs b/packages/strategy-tests/src/lib.rs index c2d81ea990..efdb702a48 100644 --- a/packages/strategy-tests/src/lib.rs +++ b/packages/strategy-tests/src/lib.rs @@ -1290,33 +1290,32 @@ impl Strategy { // Generate state transition for identity transfer operation OperationType::IdentityTransfer(identity_transfer_info) => { for _ in 0..count { + // Handle the case where specific sender, recipient, and amount are provided if let Some(transfer_info) = identity_transfer_info { - let sender = self - .start_identities - .hard_coded + let sender = current_identities .iter() - .find(|(identity, _)| identity.id() == transfer_info.from) + .find(|identity| identity.id() == transfer_info.from) .expect( "Expected to find sender identity in hardcoded start identities", ); - let recipient = self - .start_identities - .hard_coded + let recipient = current_identities .iter() - .find(|(identity, _)| identity.id() == transfer_info.to) + .find(|identity| identity.id() == transfer_info.to) .expect( "Expected to find recipient identity in hardcoded start identities", ); let state_transition = create_identity_credit_transfer_transition( - &sender.0, - &recipient.0, + &sender, + &recipient, identity_nonce_counter, - signer, // This means the TUI loaded identity must always be the sender since we're always signing with it for now + 
signer, // This means in the TUI, the loaded identity must always be the sender since we're always signing with it for now transfer_info.amount, ); operations.push(state_transition); } else if current_identities.len() > 1 { + // Handle the case where no sender, recipient, and amount are provided + let identities_count = current_identities.len(); if identities_count == 0 { break; From 4bc0a653de328adacae4e8e85ab6971c42984786 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 16:19:48 +0700 Subject: [PATCH 08/21] let transfer keys be any security level or key type --- packages/strategy-tests/src/transitions.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/strategy-tests/src/transitions.rs b/packages/strategy-tests/src/transitions.rs index c77b51e290..a9cb113d83 100644 --- a/packages/strategy-tests/src/transitions.rs +++ b/packages/strategy-tests/src/transitions.rs @@ -824,8 +824,8 @@ pub fn create_identity_credit_transfer_transition( let identity_public_key = identity .get_first_public_key_matching( Purpose::TRANSFER, - HashSet::from([SecurityLevel::CRITICAL]), - HashSet::from([KeyType::ECDSA_SECP256K1, KeyType::BLS12_381]), + SecurityLevel::full_range().into(), + KeyType::all_key_types().into(), false, ) .expect("expected to get a signing key"); From dc4882725f6ed6f7a6e4bc8835d84d95bd4ec41f Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 17:44:37 +0700 Subject: [PATCH 09/21] fix --- packages/strategy-tests/src/transitions.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/strategy-tests/src/transitions.rs b/packages/strategy-tests/src/transitions.rs index a9cb113d83..c77b51e290 100644 --- a/packages/strategy-tests/src/transitions.rs +++ b/packages/strategy-tests/src/transitions.rs @@ -824,8 +824,8 @@ pub fn create_identity_credit_transfer_transition( let identity_public_key = identity .get_first_public_key_matching( Purpose::TRANSFER, - SecurityLevel::full_range().into(), - KeyType::all_key_types().into(), + HashSet::from([SecurityLevel::CRITICAL]), + HashSet::from([KeyType::ECDSA_SECP256K1, KeyType::BLS12_381]), false, ) .expect("expected to get a signing key"); From ae97f47b72a9e5a1c914469107865cb971929878 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Wed, 6 Nov 2024 21:56:50 +0700 Subject: [PATCH 10/21] ci: run devcontainers workflow only on push to master (#2295) --- .github/workflows/prebuild-devcontainers.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/prebuild-devcontainers.yml b/.github/workflows/prebuild-devcontainers.yml index 794fa3d4a5..1825985c82 100644 --- a/.github/workflows/prebuild-devcontainers.yml +++ b/.github/workflows/prebuild-devcontainers.yml @@ -7,6 +7,8 @@ on: - '.github/workflows/prebuild-devcontainers.yml' - rust-toolchain.toml - Dockerfile + branches: + - master workflow_dispatch: concurrency: From 48cca1a319505ff7abda72c9a3bb87883d6e8a07 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Wed, 6 Nov 2024 21:57:07 +0700 Subject: [PATCH 11/21] ci: do not run test on push (#2308) --- .github/workflows/tests.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c823d0cd06..cca5f1c471 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -7,12 +7,6 @@ on: branches: - master - 'v[0-9]+\.[0-9]+-dev' - push: - branches: - - master - - 'v[0-9]+\.[0-9]+-dev' - schedule: - - cron: "30 4 * * *" concurrency: group: ${{ github.workflow }}-${{ github.ref }} From 
5c80069de3937c439b9646321df3951c57406e32 Mon Sep 17 00:00:00 2001 From: lklimek <842586+lklimek@users.noreply.github.com> Date: Fri, 8 Nov 2024 10:50:04 +0100 Subject: [PATCH 12/21] ci: use prebuilt librocksdb in github actions (#2316) --- .github/actions/librocksdb/action.yaml | 55 ++++++++++++++++++++++ .github/workflows/cached.yml | 23 +++++++++ .github/workflows/docs.yml | 2 +- .github/workflows/manage-runs.yml | 2 +- .github/workflows/pr.yml | 2 +- .github/workflows/release-docker-image.yml | 2 +- .github/workflows/release.yml | 6 +-- .github/workflows/tests-codeql.yml | 2 +- .github/workflows/tests-js-package.yml | 4 +- .github/workflows/tests-rs-package.yml | 37 +++++++++++++-- .github/workflows/tests.yml | 40 ++++++++++++---- 11 files changed, 152 insertions(+), 23 deletions(-) create mode 100644 .github/actions/librocksdb/action.yaml create mode 100644 .github/workflows/cached.yml diff --git a/.github/actions/librocksdb/action.yaml b/.github/actions/librocksdb/action.yaml new file mode 100644 index 0000000000..895185f43d --- /dev/null +++ b/.github/actions/librocksdb/action.yaml @@ -0,0 +1,55 @@ +--- +# This action builds and caches librocksdb. If we find that this solution consumes too much time, we can consider +# prebuilding librocksdb outside of the pipeline (eg. in the grovedb release process), publish as an artifact, and +# download it in the pipeline. +name: "librocksdb" +description: "Build and install librocksdb" +inputs: + version: + description: RocksDB version, eg. "8.10.2" + required: false + default: "8.10.2" + bucket: + description: S3 bucket to use for caching + required: false + default: multi-runner-cache-x1xibo9c + force: + description: Force rebuild + required: false + default: "false" + +runs: + using: composite + steps: + # Cache librocksdb using s3 bucket + - name: Restore cached librocksdb from S3 + id: librocksdb-cache + uses: strophy/actions-cache@opendal-update + with: + bucket: ${{ inputs.bucket }} + path: /opt/rocksdb + key: librocksdb/${{ inputs.version }}/${{ runner.os }}/${{ runner.arch }} + + - if: ${{ steps.librocksdb-cache.outputs.cache-hit != 'true' || inputs.force == 'true' }} + shell: bash + name: Build librocksdb + run: | + set -ex + WORKDIR=/tmp/rocksdb-build + mkdir -p ${WORKDIR}/rocksdb + mkdir -p /opt/rocksdb/usr/local/lib/ + pushd ${WORKDIR}/rocksdb + + # building rocksdb + git clone https://github.com/facebook/rocksdb.git -b v${{ inputs.version }} --depth 1 . + make -j$(nproc) static_lib + make DESTDIR=/opt/rocksdb install-static + set +x + + echo Done. 
+ echo Configuration: + echo + echo "ROCKSDB_STATIC='/opt/rocksdb/usr/local/lib/librocksdb.a'" + echo "ROCKSDB_LIB_DIR='/opt/rocksdb/usr/local/lib'" + + popd diff --git a/.github/workflows/cached.yml b/.github/workflows/cached.yml new file mode 100644 index 0000000000..12eed8dd3e --- /dev/null +++ b/.github/workflows/cached.yml @@ -0,0 +1,23 @@ +--- +name: Rebuild cached dependencies + +on: + workflow_dispatch: +jobs: + build-rust-deps: + name: Prebuild and cache some Rust dependencies + runs-on: ubuntu-24.04 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: ${{ secrets.AWS_REGION }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Precompile librocksdb + uses: ./.github/actions/librocksdb + with: + force: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 12f512b720..d8e24ef706 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,7 +7,7 @@ on: jobs: build: name: Deploy docs - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Checkout main uses: actions/checkout@v4 diff --git a/.github/workflows/manage-runs.yml b/.github/workflows/manage-runs.yml index 2c07af5b4f..29bc43aa53 100644 --- a/.github/workflows/manage-runs.yml +++ b/.github/workflows/manage-runs.yml @@ -7,7 +7,7 @@ on: jobs: cancel-merged-or-closed-pr-runs: name: Cancel runs for merged or closed PRs - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: octokit/request-action@v2.x id: get_active_workflows diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index d475a3eef8..e75151c841 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -10,7 +10,7 @@ on: jobs: pr-title: name: PR title - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: Validate conventional PR title uses: amannn/action-semantic-pull-request@v5 diff --git a/.github/workflows/release-docker-image.yml b/.github/workflows/release-docker-image.yml index 5c56ade272..54e4cb465a 100644 --- a/.github/workflows/release-docker-image.yml +++ b/.github/workflows/release-docker-image.yml @@ -89,7 +89,7 @@ jobs: publish-manifest: name: Publish image tags needs: build-image - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download digests uses: actions/download-artifact@v3 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index dba5d592f5..4c43639d3a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -208,11 +208,11 @@ jobs: matrix: include: - package_type: tarballs - os: ubuntu-22.04 + os: ubuntu-24.04 - package_type: win - os: ubuntu-22.04 + os: ubuntu-24.04 - package_type: deb - os: ubuntu-22.04 + os: ubuntu-24.04 - package_type: macos os: macos-14 steps: diff --git a/.github/workflows/tests-codeql.yml b/.github/workflows/tests-codeql.yml index d00a66c8df..78a3f53aeb 100644 --- a/.github/workflows/tests-codeql.yml +++ b/.github/workflows/tests-codeql.yml @@ -4,7 +4,7 @@ on: jobs: codeql: name: Run Code QL - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: actions: read contents: read diff --git a/.github/workflows/tests-js-package.yml b/.github/workflows/tests-js-package.yml index ef508ec073..bdffc8cd41 100644 --- a/.github/workflows/tests-js-package.yml +++ b/.github/workflows/tests-js-package.yml @@ -17,7 +17,7 @@ on: jobs: lint: name: Linting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: id-token: write contents: read @@ -51,7 +51,7 
@@ jobs: test: name: Tests - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: id-token: write contents: read diff --git a/.github/workflows/tests-rs-package.yml b/.github/workflows/tests-rs-package.yml index e666491ebc..a05e68ee6c 100644 --- a/.github/workflows/tests-rs-package.yml +++ b/.github/workflows/tests-rs-package.yml @@ -12,7 +12,7 @@ on: lint-runner: description: Runner for linting. Must be JSON valid string. type: string - default: '"ubuntu-22.04"' + default: '"ubuntu-24.04"' check-each-feature: description: If true, try to build each individual feature for this crate type: boolean @@ -42,6 +42,9 @@ jobs: with: components: clippy + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - uses: clechasseur/rs-clippy-check@v3 with: args: --package ${{ inputs.package }} --all-features --locked -- --no-deps @@ -50,10 +53,12 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" formatting: name: Formatting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 timeout-minutes: 5 steps: - name: Check out repo @@ -65,12 +70,19 @@ jobs: components: rustfmt cache: false + # This step doesn't need librocksdb, so we don't install it + - name: Check formatting + env: + RUSTC_WRAPPER: sccache + SCCACHE_BUCKET: multi-runner-cache-x1xibo9c + SCCACHE_REGION: ${{ secrets.AWS_REGION }} + SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu run: cargo fmt --check --package=${{ inputs.package }} unused_deps: name: Unused dependencies - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: id-token: write contents: read @@ -89,6 +101,9 @@ jobs: - name: Setup Rust uses: ./.github/actions/rust + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Get crate ${{ inputs.package }} info id: crate_info uses: ./.github/actions/crate_info @@ -102,12 +117,14 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" with: args: ${{ steps.crate_info.outputs.cargo_manifest_dir }} detect_structure_changes: name: Detect immutable structure changes - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # FIXME: as we use `gh pr view` below, this check can only # run on pull requests. We should find a way to run it # when manual triggers are used. 
@@ -184,6 +201,9 @@ jobs: - name: Setup Rust uses: ./.github/actions/rust + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Run tests run: cargo test --package=${{ inputs.package }} --all-features --locked env: @@ -191,6 +211,8 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" check_each_feature: name: Check each feature @@ -199,7 +221,7 @@ jobs: if: ${{ inputs.check-each-feature }} steps: - name: Check out repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure AWS credentials and bucket region uses: aws-actions/configure-aws-credentials@v4 @@ -211,6 +233,9 @@ jobs: - name: Setup Rust uses: ./.github/actions/rust + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Get crate ${{ runner.arch }} info id: crate_info uses: ./.github/actions/crate_info @@ -223,6 +248,8 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" run: | echo Verify all features disabled set -ex diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cca5f1c471..5f6cec2c08 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,6 +2,11 @@ name: Tests on: workflow_dispatch: + inputs: + rebuild-deps: + description: "Rebuild cached Rust dependencies" + required: false + default: "false" pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: @@ -16,13 +21,13 @@ jobs: changes: name: Determine changed packages if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 outputs: js-packages: ${{ steps.filter-js.outputs.changes }} rs-packages: ${{ steps.filter-rs.outputs.changes }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 @@ -36,6 +41,22 @@ jobs: with: filters: .github/package-filters/rs-packages.yml + build-rust-deps: + name: Prebuild and cache some Rust dependencies + runs-on: ubuntu-24.04 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: ${{ secrets.AWS_REGION }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Precompile librocksdb + uses: ./.github/actions/librocksdb + build-js: name: Build JS packages if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} @@ -69,6 +90,7 @@ jobs: name: Rust packages needs: - changes + - build-rust-deps secrets: inherit strategy: fail-fast: false @@ -77,17 +99,19 @@ jobs: uses: ./.github/workflows/tests-rs-package.yml with: package: ${{ matrix.rs-package }} - # lint-runner: ${{ contains(fromJSON('["drive-abci", "drive"]'), matrix.rs-package) && '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' || '"ubuntu-22.04"' }} + # lint-runner: ${{ contains(fromJSON('["drive-abci", "drive"]'), matrix.rs-package) && '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' || 
'"ubuntu-24.04"' }} # FIXME: Clippy fails on github hosted runners, most likely due to RAM usage. Using self-hosted runners for now. - lint-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + # lint-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + lint-runner: '["ubuntu-24.04"]' # Run drive tests on self-hosted 4x - test-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + # test-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + test-runner: '["ubuntu-24.04"]' check-each-feature: ${{ contains(fromJSON('["dash-sdk","rs-dapi-client","dapi-grpc","dpp","drive-abci"]'), matrix.rs-package) }} rs-crates-security: name: Rust crates security audit if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 @@ -116,7 +140,7 @@ jobs: js-deps-versions: name: JS dependency versions check if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 @@ -135,7 +159,7 @@ jobs: js-npm-security: name: JS NPM security audit if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 From fc0082d429d8e747420d0949e9fcaa74e4c9da7e Mon Sep 17 00:00:00 2001 From: ivanshumkov Date: Sat, 9 Nov 2024 13:15:14 +0700 Subject: [PATCH 13/21] build: use cargo binstall to speed up builds --- .github/actions/rust/action.yaml | 4 +- .github/workflows/tests-build-js.yml | 8 ++++ Dockerfile | 49 +++++++++++++++---------- README.md | 2 +- packages/wasm-dpp/README.md | 2 +- packages/wasm-dpp/scripts/build-wasm.sh | 4 +- 6 files changed, 44 insertions(+), 25 deletions(-) diff --git a/.github/actions/rust/action.yaml b/.github/actions/rust/action.yaml index 8854007013..9fd060c4b3 100644 --- a/.github/actions/rust/action.yaml +++ b/.github/actions/rust/action.yaml @@ -85,9 +85,9 @@ runs: export PATH="${PATH}:${HOME}/.local/bin" - name: Run sccache-cache - uses: mozilla-actions/sccache-action@v0.0.3 + uses: mozilla-actions/sccache-action@v0.0.6 with: - version: "v0.7.1" # Must be the same as in Dockerfile + version: "v0.8.2" # Must be the same as in Dockerfile if: inputs.cache == 'true' - name: Hash ref_name diff --git a/.github/workflows/tests-build-js.yml b/.github/workflows/tests-build-js.yml index a367af55fc..d5b9561e26 100644 --- a/.github/workflows/tests-build-js.yml +++ b/.github/workflows/tests-build-js.yml @@ -46,6 +46,14 @@ jobs: with: target: wasm32-unknown-unknown + - name: Install Cargo binstall + uses: cargo-bins/cargo-binstall@main + if: ${{ steps.cache.outputs.cache-hit != 'true' }} + + - name: Install wasm-bindgen-cli + run: cargo binstall wasm-bindgen-cli@0.2.86 + if: ${{ steps.cache.outputs.cache-hit != 'true' }} + - name: Build JS packages run: yarn build if: ${{ steps.cache.outputs.cache-hit != 'true' }} diff --git a/Dockerfile b/Dockerfile index 5039a062b6..b35e55a8d4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -# syntax = docker/dockerfile:1.5 +# syntax = docker/dockerfile:1 # Docker image for rs-drive-abci # @@ -30,7 +30,6 @@ # SCCACHE_SERVER_PORT port 
to avoid conflicts in case of parallel compilation ARG ALPINE_VERSION=3.18 -ARG PROTOC_VERSION=27.3 ARG RUSTC_WRAPPER # @@ -82,7 +81,7 @@ RUN TOOLCHAIN_VERSION="$(grep channel rust-toolchain.toml | awk '{print $3}' | t # Install protoc - protobuf compiler # The one shipped with Alpine does not work -ARG PROTOC_VERSION +ARG PROTOC_VERSION=27.3 RUN if [[ "$TARGETARCH" == "arm64" ]] ; then export PROTOC_ARCH=aarch_64; else export PROTOC_ARCH=x86_64; fi; \ curl -Ls https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-${PROTOC_ARCH}.zip \ -o /tmp/protoc.zip && \ @@ -102,7 +101,7 @@ ENV NODE_ENV ${NODE_ENV} FROM deps-base AS deps-sccache -ARG SCCHACHE_VERSION=0.7.1 +ARG SCCHACHE_VERSION=0.8.2 # Install sccache for caching RUN if [[ "$TARGETARCH" == "arm64" ]] ; then export SCC_ARCH=aarch64; else export SCC_ARCH=x86_64; fi; \ @@ -156,22 +155,34 @@ ENV SCCACHE_S3_KEY_PREFIX=${SCCACHE_S3_KEY_PREFIX}/${TARGETARCH}/linux-musl WORKDIR /platform +# Download and install cargo-binstall +ENV BINSTALL_VERSION=1.10.11 +RUN set -ex; \ + if [ "$TARGETARCH" = "amd64" ]; then \ + CARGO_BINSTALL_ARCH="x86_64-unknown-linux-musl"; \ + elif [ "$TARGETARCH" = "arm64" ]; then \ + CARGO_BINSTALL_ARCH="aarch64-unknown-linux-musl"; \ + else \ + echo "Unsupported architecture: $TARGETARCH"; exit 1; \ + fi; \ + # Construct download URL + DOWNLOAD_URL="https://github.com/cargo-bins/cargo-binstall/releases/download/v${BINSTALL_VERSION}/cargo-binstall-${CARGO_BINSTALL_ARCH}.tgz"; \ + # Download and extract the cargo-binstall binary + curl -A "Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/81.0" -L --proto '=https' --tlsv1.2 -sSf "$DOWNLOAD_URL" | tar -xvzf -; \ + ./cargo-binstall -y --force cargo-binstall; \ + rm ./cargo-binstall; \ + source $HOME/.cargo/env; \ + cargo binstall -V + # Install wasm-bindgen-cli in the same profile as other components, to sacrifice some performance & disk space to gain # better build caching -RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOME}/registry/index \ - --mount=type=cache,sharing=shared,id=cargo_registry_cache,target=${CARGO_HOME}/registry/cache \ - --mount=type=cache,sharing=shared,id=cargo_git,target=${CARGO_HOME}/git/db \ - --mount=type=cache,sharing=shared,id=target_${TARGETARCH},target=/platform/target \ - export SCCACHE_SERVER_PORT=$((RANDOM+1025)) && \ - source $HOME/.cargo/env && \ - if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \ - RUSTFLAGS="-C target-feature=-crt-static" \ - CARGO_TARGET_DIR="/platform/target" \ - # TODO: Build wasm with build.rs - # Meanwhile if you want to update wasm-bindgen you also need to update version in: - # - packages/wasm-dpp/Cargo.toml - # - packages/wasm-dpp/scripts/build-wasm.sh - cargo install --profile "$CARGO_BUILD_PROFILE" wasm-bindgen-cli@0.2.86 cargo-chef@0.1.67 --locked +RUN source $HOME/.cargo/env; \ + cargo binstall wasm-bindgen-cli@0.2.86 cargo-chef@0.1.67 \ + --locked \ + --no-discover-github-token \ + --disable-telemetry \ + --no-track \ + --no-confirm # # Rust build planner to speed up builds @@ -232,7 +243,7 @@ RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOM else \ export FEATURES_FLAG="--features=console,grovedbg" ; \ export OUT_DIRECTORY=debug ; \ - + fi && \ if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \ cargo build \ diff --git a/README.md b/README.md index 3b2bd65ecb..8c5254f33c 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ 
this repository may be used on the following networks: in terminal run `echo 'export PATH="/opt/homebrew/opt/llvm/bin:$PATH"' >> ~/.zshrc` or `echo 'export PATH="/opt/homebrew/opt/llvm/bin:$PATH"' >> ~/.bash_profile` depending on your default shell. You can find your default shell with `echo $SHELL` - Reload your shell with `source ~/.zshrc` or `source ~/.bash_profile` - - `cargo install wasm-bindgen-cli@0.2.85` + - `cargo install wasm-bindgen-cli@0.2.86` - *double-check that wasm-bindgen-cli version above matches wasm-bindgen version in Cargo.lock file* - *Depending on system, additional packages may need to be installed as a prerequisite for wasm-bindgen-cli. If anything is missing, installation will error and prompt what packages are missing (i.e. clang, llvm, libssl-dev)* - essential build tools - example for Debian/Ubuntu: `apt install -y build-essential libssl-dev pkg-config clang cmake llvm` diff --git a/packages/wasm-dpp/README.md b/packages/wasm-dpp/README.md index cc31d1a824..73b3494c45 100644 --- a/packages/wasm-dpp/README.md +++ b/packages/wasm-dpp/README.md @@ -39,7 +39,7 @@ Library consumers must ignore class names minification for `@dashevo/wasm-dpp` l - Install [Rust](https://www.rust-lang.org/tools/install) v1.73+ - Add wasm32 target: `$ rustup target add wasm32-unknown-unknown` -- Install wasm-bingen-cli: `cargo install wasm-bindgen-cli@0.2.85` +- Install wasm-bingen-cli: `cargo install wasm-bindgen-cli@0.2.86` - *double-check that wasm-bindgen-cli version above matches wasm-bindgen version in Cargo.lock file* - *Depending on system, additional packages may need to be installed as a prerequisite for wasm-bindgen-cli. If anything is missing, installation will error and prompt what packages are missing (i.e. clang, llvm, libssl-dev)* diff --git a/packages/wasm-dpp/scripts/build-wasm.sh b/packages/wasm-dpp/scripts/build-wasm.sh index 2b89311437..0c154372ba 100755 --- a/packages/wasm-dpp/scripts/build-wasm.sh +++ b/packages/wasm-dpp/scripts/build-wasm.sh @@ -28,8 +28,8 @@ fi # - packages/wasm-dpp/Cargo.toml # - Dockerfile if ! [[ -x "$(command -v wasm-bindgen)" ]]; then - echo "Wasm-bindgen CLI ${WASM_BINDGEN_VERSION} is not installed. Installing" - cargo install --config net.git-fetch-with-cli=true --profile "${CARGO_BUILD_PROFILE}" -f "wasm-bindgen-cli@0.2.86" + echo "Wasm-bindgen CLI ${WASM_BINDGEN_VERSION} is not installed." + exit 1 fi # On a mac, bundled clang won't work - you need to install LLVM manually through brew, From d4c2f7dba878ec35306d7807744107124584543b Mon Sep 17 00:00:00 2001 From: ivanshumkov Date: Sat, 9 Nov 2024 13:48:59 +0700 Subject: [PATCH 14/21] build: use binstall in devcontainer too --- .devcontainer/Dockerfile | 32 +++++++++++++++++++++++--------- Dockerfile | 2 -- 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index e0a517ab06..ce363420ef 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -44,16 +44,30 @@ ENV PATH=$CARGO_HOME/bin:$PATH COPY rust-toolchain.toml . 
RUN TOOLCHAIN_VERSION="$(grep channel rust-toolchain.toml | awk '{print $3}' | tr -d '"')" && \ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- \ - --profile minimal \ -y \ --default-toolchain "${TOOLCHAIN_VERSION}" \ --target wasm32-unknown-unknown -# Install wasm-bindgen-cli in the same profile as other components, to sacrifice some performance & disk space to gain -# better build caching -RUN if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \ - RUSTFLAGS="-C target-feature=-crt-static" \ - # Meanwhile if you want to update wasm-bindgen you also need to update version in: - # - packages/wasm-dpp/Cargo.toml - # - packages/wasm-dpp/scripts/build-wasm.sh - cargo install wasm-bindgen-cli@0.2.86 --locked +# Download and install cargo-binstall +ENV BINSTALL_VERSION=1.10.11 +RUN set -ex; \ + if [ "$TARGETARCH" = "amd64" ]; then \ + CARGO_BINSTALL_ARCH="x86_64-unknown-linux-musl"; \ + elif [ "$TARGETARCH" = "arm64" ] || [ "$TARGETARCH" = "aarch64" ]; then \ + CARGO_BINSTALL_ARCH="aarch64-unknown-linux-musl"; \ + else \ + echo "Unsupported architecture: $TARGETARCH"; exit 1; \ + fi; \ + DOWNLOAD_URL="https://github.com/cargo-bins/cargo-binstall/releases/download/v${BINSTALL_VERSION}/cargo-binstall-${CARGO_BINSTALL_ARCH}.tgz"; \ + curl -L --fail --show-error "$DOWNLOAD_URL" -o /tmp/cargo-binstall.tgz; \ + tar -xzf /tmp/cargo-binstall.tgz -C /tmp cargo-binstall; \ + chmod +x /tmp/cargo-binstall; \ + /tmp/cargo-binstall -y --force cargo-binstall; \ + rm /tmp/cargo-binstall; \ + cargo binstall -V + +RUN cargo binstall wasm-bindgen-cli@0.2.86 --locked \ + --no-discover-github-token \ + --disable-telemetry \ + --no-track \ + --no-confirm diff --git a/Dockerfile b/Dockerfile index b35e55a8d4..322c803d15 100644 --- a/Dockerfile +++ b/Dockerfile @@ -174,8 +174,6 @@ RUN set -ex; \ source $HOME/.cargo/env; \ cargo binstall -V -# Install wasm-bindgen-cli in the same profile as other components, to sacrifice some performance & disk space to gain -# better build caching RUN source $HOME/.cargo/env; \ cargo binstall wasm-bindgen-cli@0.2.86 cargo-chef@0.1.67 \ --locked \ From 681bf7295f63a8163860c2b930cd36b1a03011db Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Mon, 11 Nov 2024 18:54:15 +0700 Subject: [PATCH 15/21] ci: install wasm bindgen for release --- .github/workflows/release.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4c43639d3a..c71e25e1c1 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -69,6 +69,14 @@ jobs: - name: Setup Node.JS uses: ./.github/actions/nodejs + - name: Install Cargo binstall + uses: cargo-bins/cargo-binstall@main + if: ${{ steps.cache.outputs.cache-hit != 'true' }} + + - name: Install wasm-bindgen-cli + run: cargo binstall wasm-bindgen-cli@0.2.86 + if: ${{ steps.cache.outputs.cache-hit != 'true' }} + - name: Build packages run: yarn build env: From 1ac8d3bc6f973d11164181d79e6c09e5cef73dff Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Mon, 11 Nov 2024 20:25:03 +0700 Subject: [PATCH 16/21] build: CARGO_HOME is not defined --- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 322c803d15..8ee9a52859 100644 --- a/Dockerfile +++ b/Dockerfile @@ -35,7 +35,7 @@ ARG RUSTC_WRAPPER # # DEPS: INSTALL AND CACHE DEPENDENCIES # -FROM node:20-alpine${ALPINE_VERSION} as deps-base +FROM node:20-alpine${ALPINE_VERSION} AS deps-base # # Install some dependencies @@ -94,10 
+94,10 @@ RUN rm /usr/bin/cc && ln -s /usr/bin/clang /usr/bin/cc # Select whether we want dev or release ARG CARGO_BUILD_PROFILE=dev -ENV CARGO_BUILD_PROFILE ${CARGO_BUILD_PROFILE} +ENV CARGO_BUILD_PROFILE=${CARGO_BUILD_PROFILE} ARG NODE_ENV=production -ENV NODE_ENV ${NODE_ENV} +ENV NODE_ENV=${NODE_ENV} FROM deps-base AS deps-sccache @@ -204,6 +204,7 @@ SHELL ["/bin/bash", "-o", "pipefail","-e", "-x", "-c"] ARG SCCACHE_S3_KEY_PREFIX ENV SCCACHE_S3_KEY_PREFIX=${SCCACHE_S3_KEY_PREFIX}/${TARGETARCH}/linux-musl +ENV CARGO_HOME=$HOME/.cargo WORKDIR /platform @@ -241,7 +242,6 @@ RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOM else \ export FEATURES_FLAG="--features=console,grovedbg" ; \ export OUT_DIRECTORY=debug ; \ - fi && \ if [[ -z "${SCCACHE_MEMCACHED}" ]] ; then unset SCCACHE_MEMCACHED ; fi ; \ cargo build \ From fa4cf998566deaeadf670d59800a19920247d266 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Wed, 13 Nov 2024 08:57:57 +0700 Subject: [PATCH 17/21] build: fix sccache error --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 8ee9a52859..caedf8c163 100644 --- a/Dockerfile +++ b/Dockerfile @@ -117,7 +117,7 @@ ARG RUSTC_WRAPPER ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} # Set args below to use Github Actions cache; see https://github.com/mozilla/sccache/blob/main/docs/GHA.md -ARG SCCACHE_GHA_ENABLED +ARG SCCACHE_GHA_ENABLED="false" ENV SCCACHE_GHA_ENABLED=${SCCACHE_GHA_ENABLED} ARG ACTIONS_CACHE_URL From 6584fae2dd6888742d4fb5c3729c587ed2ad51cc Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Wed, 13 Nov 2024 08:59:13 +0700 Subject: [PATCH 18/21] ci: use version instead of branch --- .github/workflows/release.yml | 2 +- .github/workflows/tests-build-js.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c71e25e1c1..fb854289a4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,7 +70,7 @@ jobs: uses: ./.github/actions/nodejs - name: Install Cargo binstall - uses: cargo-bins/cargo-binstall@main + uses: cargo-bins/cargo-binstall@v1 if: ${{ steps.cache.outputs.cache-hit != 'true' }} - name: Install wasm-bindgen-cli diff --git a/.github/workflows/tests-build-js.yml b/.github/workflows/tests-build-js.yml index d5b9561e26..c094c49ec7 100644 --- a/.github/workflows/tests-build-js.yml +++ b/.github/workflows/tests-build-js.yml @@ -47,7 +47,7 @@ jobs: target: wasm32-unknown-unknown - name: Install Cargo binstall - uses: cargo-bins/cargo-binstall@main + uses: cargo-bins/cargo-binstall@v1 if: ${{ steps.cache.outputs.cache-hit != 'true' }} - name: Install wasm-bindgen-cli From 113e695d6d51187f46679515b66cae0e88c0b5c4 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Wed, 13 Nov 2024 21:15:11 +0700 Subject: [PATCH 19/21] ci: specify correct version --- .github/workflows/release.yml | 2 +- .github/workflows/tests-build-js.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 67855dba85..4fc9d0cde1 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,7 +70,7 @@ jobs: uses: ./.github/actions/nodejs - name: Install Cargo binstall - uses: cargo-bins/cargo-binstall@v1 + uses: cargo-bins/cargo-binstall@v1.3.1 if: ${{ steps.cache.outputs.cache-hit != 'true' }} - name: Install wasm-bindgen-cli diff --git a/.github/workflows/tests-build-js.yml b/.github/workflows/tests-build-js.yml index 
8f1bc52bf3..fbf9082512 100644
--- a/.github/workflows/tests-build-js.yml
+++ b/.github/workflows/tests-build-js.yml
@@ -47,7 +47,7 @@ jobs:
           target: wasm32-unknown-unknown

       - name: Install Cargo binstall
-        uses: cargo-bins/cargo-binstall@v1
+        uses: cargo-bins/cargo-binstall@v1.3.1
        if: ${{ steps.cache.outputs.cache-hit != 'true' }}

       - name: Install wasm-bindgen-cli

From 6e02f6aa4c11379ede2a746c102f14ae1aad90c6 Mon Sep 17 00:00:00 2001
From: Ivan Shumkov
Date: Wed, 13 Nov 2024 21:18:16 +0700
Subject: [PATCH 20/21] build: set HOME variable

---
 Dockerfile | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Dockerfile b/Dockerfile
index c45cc31b10..d03f82d895 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -92,6 +92,7 @@ RUN TOOLCHAIN_VERSION="$(grep channel rust-toolchain.toml | awk '{print $3}' | t
     --default-toolchain "${TOOLCHAIN_VERSION}" \
     --target wasm32-unknown-unknown

+ONBUILD ENV HOME=/root
 ONBUILD ENV CARGO_HOME=$HOME/.cargo

 # Install protoc - protobuf compiler

From e395f2e689c23de4fa7b43029739737f42bb3f4b Mon Sep 17 00:00:00 2001
From: Ivan Shumkov
Date: Wed, 13 Nov 2024 21:28:37 +0700
Subject: [PATCH 21/21] ci: disable saving mount cache

---
 .github/actions/docker/action.yaml | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/.github/actions/docker/action.yaml b/.github/actions/docker/action.yaml
index 69bc9989d6..a3c132c2df 100644
--- a/.github/actions/docker/action.yaml
+++ b/.github/actions/docker/action.yaml
@@ -115,9 +115,10 @@ runs:
           cache-to: ${{ steps.layer_cache_settings.outputs.cache_to }}
           outputs: type=image,name=${{ inputs.image_org }}/${{ inputs.image_name }},push-by-digest=${{ inputs.push_tags != 'true' }},name-canonical=true,push=true

-      - name: Save Docker mount cache
-        uses: dcginfra/buildkit-cache-dance/extract@s5cmd
-        if: ${{ inputs.cache_mounts != '' }}
-        with:
-          bucket: ${{ inputs.bucket }}
-          mounts: ${{ inputs.cache_mounts }}
+# TODO: This doesn't work
+#      - name: Save Docker mount cache
+#        uses: dcginfra/buildkit-cache-dance/extract@s5cmd
+#        if: ${{ inputs.cache_mounts != '' }}
+#        with:
+#          bucket: ${{ inputs.bucket }}
+#          mounts: ${{ inputs.cache_mounts }}