From d37f4c9a63bbea35def414edfeeb2c1e79e6cee9 Mon Sep 17 00:00:00 2001 From: Vadim <31490938+n00m4d@users.noreply.github.com> Date: Wed, 14 Aug 2024 09:59:49 +0200 Subject: [PATCH] Creators and collection batch mint verifications (#204) * feat: add creators and collection batch mint verifications * style: fmt * Update blockbuster/blockbuster/src/programs/bubblegum/mod.rs Co-authored-by: Stanislav Cherviakov * chore: dependency change * chore: renaming --------- Co-authored-by: Stanislav Cherviakov --- .gitmodules | 4 - Cargo.toml | 11 +- blockbuster/blockbuster/Cargo.toml | 8 +- .../blockbuster/src/programs/bubblegum/mod.rs | 53 +- .../src/dao/generated/batch_mint_to_verify.rs | 3 + .../src/dao/generated/prelude.rs | 2 - .../src/dao/generated/sea_orm_active_enums.rs | 204 ++--- digital_asset_types/tests/json_parsing.rs | 2 +- integration_tests/Cargo.toml | 3 + .../integration_tests/batch_mint_tests.rs | 773 +++++++++++++++++- ...alize_tree_with_root_instruction_handle.rs | 6 + program_transformers/Cargo.toml | 2 + .../src/batch_minting/batch_mint_persister.rs | 74 +- .../src/batch_minting/tests.rs | 23 +- .../src/bubblegum/finalize_tree_with_root.rs | 3 +- program_transformers/src/bubblegum/mod.rs | 8 +- program_transformers/src/error.rs | 8 + 17 files changed, 1000 insertions(+), 187 deletions(-) delete mode 100644 .gitmodules diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 0df22b35f..000000000 --- a/.gitmodules +++ /dev/null @@ -1,4 +0,0 @@ -[submodule "mpl-bubblegum"] - path = mpl-bubblegum - url = git@github.com:adm-metaex/mpl-bubblegum.git - branch = feat/staker-role-reference diff --git a/Cargo.toml b/Cargo.toml index e9de00676..01848e610 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -67,7 +67,7 @@ log = "0.4.17" metrics = "0.20.1" migration = {path = "migration"} mime_guess = "2.0.4" -mpl-bubblegum = { path = "blockbuster/mpl-bubblegum/clients/rust", features = ["serde"] } +mpl-bubblegum = { git = "https://github.com/adm-metaex/mpl-bubblegum.git", branch = "feature/cleanup", features = ["serde"] } mpl-core = { git = "https://github.com/RequescoS/mpl-core.git", features = ["serde"] } mpl-token-metadata = "4.1.1" nft_ingester = {path = "nft_ingester"} @@ -100,10 +100,10 @@ solana-program = "1.18.11" solana-sdk = "1.18.11" solana-transaction-status = "1.18.11" solana-zk-token-sdk = "1.17.16" -spl-account-compression = { path = "blockbuster/mpl-bubblegum/solana-program-library/account-compression/programs/account-compression", features = ["no-entrypoint"] } +spl-account-compression = { git = "https://github.com/StanChe/solana-program-library.git", branch = "feature/init_with_root", features = ["no-entrypoint"] } spl-associated-token-account = ">= 1.1.3, < 3.0" -spl-concurrent-merkle-tree = { path = "blockbuster/mpl-bubblegum/solana-program-library/libraries/concurrent-merkle-tree"} +spl-concurrent-merkle-tree = { git = "https://github.com/StanChe/solana-program-library.git", branch = "feature/init_with_root" } -spl-noop = { path = "blockbuster/mpl-bubblegum/solana-program-library/account-compression/programs/noop", features = ["no-entrypoint"] } +spl-noop = { git = "https://github.com/StanChe/solana-program-library.git", branch = "feature/init_with_root", features = ["no-entrypoint"] } spl-pod = {version = "0.1.0", features = ["serde-traits"]} spl-token = ">= 3.5.0, < 5.0" spl-token-2022 = {version = "1.0", features = ["no-entrypoint"]} @@ -111,7 +111,7 @@ spl-token-group-interface = "0.1.0" spl-token-metadata-interface = "0.2.0" sqlx = "0.6.2" stretto = "0.8.4"
-thiserror = "1.0.31" +thiserror = "1.0.63" tokio = "1.30.0" tokio-stream = "0.1.14" tower = "0.4.13" @@ -131,6 +131,7 @@ serde_with = "3.8.1" bincode = "1.3.3" tempfile = "3.10.1" async-channel = "2.3.1" +bubblegum-batch-sdk = { git = "https://github.com/metaplex-foundation/bubblegum-batch-sdk.git", branch = "main" } [workspace.lints.clippy] clone_on_ref_ptr = "deny" diff --git a/blockbuster/blockbuster/Cargo.toml b/blockbuster/blockbuster/Cargo.toml index c9c949611..9919a3e44 100644 --- a/blockbuster/blockbuster/Cargo.toml +++ b/blockbuster/blockbuster/Cargo.toml @@ -11,9 +11,9 @@ readme = "../README.md" [dependencies] bytemuck = { version = "1.14.0", features = ["derive"] } spl-token-2022 = { version = "1.0", features = ["no-entrypoint"] } -spl-account-compression = { path = "../mpl-bubblegum/solana-program-library/account-compression/programs/account-compression", features = ["no-entrypoint"] } -spl-noop = { path = "../mpl-bubblegum/solana-program-library/account-compression/programs/noop", features = ["no-entrypoint"] } -mpl-bubblegum = { path = "../mpl-bubblegum/clients/rust" } +spl-account-compression = { git = "https://github.com/StanChe/solana-program-library.git", branch = "feature/init_with_root", features = ["no-entrypoint"] } +spl-noop = { workspace = true } +mpl-bubblegum = { workspace = true } mpl-core = { git = "https://github.com/RequescoS/mpl-core.git", features = ["serde"] } mpl-token-metadata = { version = "4.1.1", features = ["serde"] } spl-token = { version = "4.0.0", features = ["no-entrypoint"] } @@ -21,7 +21,7 @@ async-trait = "0.1.57" bs58 = "0.4.0" lazy_static = "1.4.0" borsh = "~0.10.3" -thiserror = "1.0.32" +thiserror = "1.0.63" log = "0.4.17" solana-sdk = "~1.18.11" solana-transaction-status = "~1.18.11" diff --git a/blockbuster/blockbuster/src/programs/bubblegum/mod.rs b/blockbuster/blockbuster/src/programs/bubblegum/mod.rs index e1f50a553..d074bc0ee 100644 --- a/blockbuster/blockbuster/src/programs/bubblegum/mod.rs +++ b/blockbuster/blockbuster/src/programs/bubblegum/mod.rs @@ -9,8 +9,9 @@ use log::warn; use mpl_bubblegum::{ get_instruction_type, instructions::{ - FinalizeTreeWithRootInstructionArgs, UnverifyCreatorInstructionArgs, - UpdateMetadataInstructionArgs, VerifyCreatorInstructionArgs, + FinalizeTreeWithRootAndCollectionInstructionArgs, FinalizeTreeWithRootInstructionArgs, + UnverifyCreatorInstructionArgs, UpdateMetadataInstructionArgs, + VerifyCreatorInstructionArgs, }, types::{BubblegumEventType, MetadataArgs, UpdateArgs}, }; @@ -29,25 +30,28 @@ use spl_noop; #[derive(BorshSerialize, BorshDeserialize, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct FinalizeTreeWithRootInstructionArgsWithStaker { - pub rightmost_root: [u8; 32], + pub root: [u8; 32], pub rightmost_leaf: [u8; 32], pub rightmost_index: u32, pub metadata_url: String, pub metadata_hash: String, pub staker: Pubkey, + pub collection_mint: Option, } impl FinalizeTreeWithRootInstructionArgsWithStaker { fn build_finalize_tree_with_root_instruction_args_with_staker( args: FinalizeTreeWithRootInstructionArgs, staker: Pubkey, + collection_mint: Option, ) -> Self { Self { - rightmost_root: args.rightmost_root, + root: args.root, rightmost_leaf: args.rightmost_leaf, rightmost_index: args.rightmost_index, metadata_url: args.metadata_url, metadata_hash: args.metadata_hash, staker, + collection_mint, } } } @@ -80,7 +84,7 @@ pub enum Payload { update_args: UpdateArgs, tree_id: Pubkey, }, - CreateTreeWithRoot { + FinalizeTreeWithRoot 
{ args: FinalizeTreeWithRootInstructionArgsWithStaker, tree_id: Pubkey, }, @@ -237,9 +241,14 @@ impl ProgramParser for BubblegumParser { InstructionName::UpdateMetadata => { b_inst.payload = Some(build_update_metadata_payload(keys, ix_data)?); } - InstructionName::CreateTreeWithRoot => { + InstructionName::FinalizeTreeWithRoot => { b_inst.payload = Some(build_create_tree_with_root_payload(keys, ix_data)?); } + InstructionName::FinalizeTreeWithRootAndCollection => { + b_inst.payload = Some(build_create_tree_with_root_and_collection_payload( + keys, ix_data, + )?); + } _ => {} }; } @@ -348,7 +357,35 @@ fn build_create_tree_with_root_payload( let staker = *keys .get(4) .ok_or(BlockbusterError::InstructionParsingError)?; - let args = FinalizeTreeWithRootInstructionArgsWithStaker::build_finalize_tree_with_root_instruction_args_with_staker(args, staker); + let args = FinalizeTreeWithRootInstructionArgsWithStaker::build_finalize_tree_with_root_instruction_args_with_staker(args, staker, None); + + Ok(Payload::FinalizeTreeWithRoot { args, tree_id }) +} + +// See Bubblegum for offsets and positions: +// https://github.com/metaplex-foundation/mpl-bubblegum/blob/main/programs/bubblegum/README.md +fn build_create_tree_with_root_and_collection_payload( + keys: &[Pubkey], + ix_data: &[u8], +) -> Result<Payload, BlockbusterError> { + let args = FinalizeTreeWithRootAndCollectionInstructionArgs::try_from_slice(ix_data)?; + + let tree_id = *keys + .get(1) + .ok_or(BlockbusterError::InstructionParsingError)?; + let staker = *keys + .get(4) + .ok_or(BlockbusterError::InstructionParsingError)?; + let collection_mint = *keys + .get(11) + .ok_or(BlockbusterError::InstructionParsingError)?; + let args = FinalizeTreeWithRootInstructionArgsWithStaker::build_finalize_tree_with_root_instruction_args_with_staker(FinalizeTreeWithRootInstructionArgs { + root: args.root, + rightmost_leaf: args.rightmost_leaf, + rightmost_index: args.rightmost_index, + metadata_url: args.metadata_url, + metadata_hash: args.metadata_hash, + }, staker, Some(collection_mint)); - Ok(Payload::CreateTreeWithRoot { args, tree_id }) + Ok(Payload::FinalizeTreeWithRoot { args, tree_id }) } diff --git a/digital_asset_types/src/dao/generated/batch_mint_to_verify.rs b/digital_asset_types/src/dao/generated/batch_mint_to_verify.rs index 5ace93414..5be802e9c 100644 --- a/digital_asset_types/src/dao/generated/batch_mint_to_verify.rs +++ b/digital_asset_types/src/dao/generated/batch_mint_to_verify.rs @@ -21,6 +21,7 @@ pub struct Model { pub created_at_slot: i64, pub signature: String, pub staker: Vec<u8>, + pub collection: Option<Vec<u8>>, pub download_attempts: i32, pub batch_mint_persisting_state: BatchMintPersistingState, pub batch_mint_fail_status: Option<BatchMintFailStatus>, @@ -33,6 +34,7 @@ pub enum Column { CreatedAtSlot, Signature, Staker, + Collection, DownloadAttempts, BatchMintPersistingState, BatchMintFailStatus, @@ -62,6 +64,7 @@ impl ColumnTrait for Column { Self::CreatedAtSlot => ColumnType::BigInteger.def(), Self::Signature => ColumnType::String(None).def(), Self::Staker => ColumnType::Binary.def(), + Self::Collection => ColumnType::Binary.def().null(), Self::DownloadAttempts => ColumnType::Integer.def(), Self::BatchMintPersistingState => BatchMintPersistingState::db_type(), Self::BatchMintFailStatus => BatchMintFailStatus::db_type().null(), diff --git a/digital_asset_types/src/dao/generated/prelude.rs b/digital_asset_types/src/dao/generated/prelude.rs index e361cc145..37cc418af 100644 --- a/digital_asset_types/src/dao/generated/prelude.rs +++ b/digital_asset_types/src/dao/generated/prelude.rs @@
-1,7 +1,5 @@ //! SeaORM Entity. Generated by sea-orm-codegen 0.9.3 -#![allow(unused_imports)] - pub use super::asset::Entity as Asset; pub use super::asset_authority::Entity as AssetAuthority; pub use super::asset_creators::Entity as AssetCreators; diff --git a/digital_asset_types/src/dao/generated/sea_orm_active_enums.rs b/digital_asset_types/src/dao/generated/sea_orm_active_enums.rs index ce89fcd7a..737607aee 100644 --- a/digital_asset_types/src/dao/generated/sea_orm_active_enums.rs +++ b/digital_asset_types/src/dao/generated/sea_orm_active_enums.rs @@ -7,31 +7,53 @@ use serde::{Deserialize, Serialize}; #[sea_orm( rs_type = "String", db_type = "Enum", - enum_name = "v1_account_attachments" + enum_name = "specification_versions" )] -pub enum V1AccountAttachments { - #[sea_orm(string_value = "edition")] - Edition, - #[sea_orm(string_value = "edition_marker")] - EditionMarker, - #[sea_orm(string_value = "master_edition_v1")] - MasterEditionV1, - #[sea_orm(string_value = "master_edition_v2")] - MasterEditionV2, +pub enum SpecificationVersions { #[sea_orm(string_value = "unknown")] Unknown, + #[sea_orm(string_value = "v0")] + V0, + #[sea_orm(string_value = "v1")] + V1, + #[sea_orm(string_value = "v2")] + V2, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")] -pub enum TaskStatus { - #[sea_orm(string_value = "failed")] - Failed, - #[sea_orm(string_value = "pending")] - Pending, - #[sea_orm(string_value = "running")] - Running, - #[sea_orm(string_value = "success")] - Success, +#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "instruction")] +pub enum Instruction { + #[sea_orm(string_value = "burn")] + Burn, + #[sea_orm(string_value = "cancel_redeem")] + CancelRedeem, + #[sea_orm(string_value = "compress")] + Compress, + #[sea_orm(string_value = "decompress_v1")] + DecompressV1, + #[sea_orm(string_value = "delegate")] + Delegate, + #[sea_orm(string_value = "mint_to_collection_v1")] + MintToCollectionV1, + #[sea_orm(string_value = "mint_v1")] + MintV1, + #[sea_orm(string_value = "redeem")] + Redeem, + #[sea_orm(string_value = "set_and_verify_collection")] + SetAndVerifyCollection, + #[sea_orm(string_value = "transfer")] + Transfer, + #[sea_orm(string_value = "unknown")] + Unknown, + #[sea_orm(string_value = "unverify_collection")] + UnverifyCollection, + #[sea_orm(string_value = "unverify_creator")] + UnverifyCreator, + #[sea_orm(string_value = "update_metadata")] + UpdateMetadata, + #[sea_orm(string_value = "verify_collection")] + VerifyCollection, + #[sea_orm(string_value = "verify_creator")] + VerifyCreator, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] #[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "owner_type")] @@ -44,6 +66,44 @@ pub enum OwnerType { Unknown, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] +#[sea_orm( + rs_type = "String", + db_type = "Enum", + enum_name = "batch_mint_persisting_state" +)] +pub enum BatchMintPersistingState { + #[sea_orm(string_value = "failed_to_persist")] + FailedToPersist, + #[sea_orm(string_value = "received_transaction")] + ReceivedTransaction, + #[sea_orm(string_value = "start_processing")] + StartProcessing, + #[sea_orm(string_value = "stored_update")] + StoredUpdate, + #[sea_orm(string_value = "successfully_download")] + SuccessfullyDownload, + #[sea_orm(string_value = "successfully_validate")] + SuccessfullyValidate, +} 
+#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] +#[sea_orm( + rs_type = "String", + db_type = "Enum", + enum_name = "v1_account_attachments" +)] +pub enum V1AccountAttachments { + #[sea_orm(string_value = "edition")] + Edition, + #[sea_orm(string_value = "edition_marker")] + EditionMarker, + #[sea_orm(string_value = "master_edition_v1")] + MasterEditionV1, + #[sea_orm(string_value = "master_edition_v2")] + MasterEditionV2, + #[sea_orm(string_value = "unknown")] + Unknown, +} +#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] #[sea_orm( rs_type = "String", db_type = "Enum", @@ -76,24 +136,8 @@ pub enum SpecificationAssetClass { Unknown, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm( - rs_type = "String", - db_type = "Enum", - enum_name = "royalty_target_type" -)] -pub enum RoyaltyTargetType { - #[sea_orm(string_value = "creators")] - Creators, - #[sea_orm(string_value = "fanout")] - Fanout, - #[sea_orm(string_value = "single")] - Single, - #[sea_orm(string_value = "unknown")] - Unknown, -} -#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "mutability")] -pub enum Mutability { +#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "chain_mutability")] +pub enum ChainMutability { #[sea_orm(string_value = "immutable")] Immutable, #[sea_orm(string_value = "mutable")] @@ -102,60 +146,36 @@ pub enum Mutability { Unknown, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "instruction")] -pub enum Instruction { - #[sea_orm(string_value = "burn")] - Burn, - #[sea_orm(string_value = "cancel_redeem")] - CancelRedeem, - #[sea_orm(string_value = "compress")] - Compress, - #[sea_orm(string_value = "decompress_v1")] - DecompressV1, - #[sea_orm(string_value = "delegate")] - Delegate, - #[sea_orm(string_value = "mint_to_collection_v1")] - MintToCollectionV1, - #[sea_orm(string_value = "mint_v1")] - MintV1, - #[sea_orm(string_value = "redeem")] - Redeem, - #[sea_orm(string_value = "set_and_verify_collection")] - SetAndVerifyCollection, - #[sea_orm(string_value = "transfer")] - Transfer, - #[sea_orm(string_value = "unknown")] - Unknown, - #[sea_orm(string_value = "unverify_collection")] - UnverifyCollection, - #[sea_orm(string_value = "unverify_creator")] - UnverifyCreator, - #[sea_orm(string_value = "update_metadata")] - UpdateMetadata, - #[sea_orm(string_value = "verify_collection")] - VerifyCollection, - #[sea_orm(string_value = "verify_creator")] - VerifyCreator, +#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "task_status")] +pub enum TaskStatus { + #[sea_orm(string_value = "failed")] + Failed, + #[sea_orm(string_value = "pending")] + Pending, + #[sea_orm(string_value = "running")] + Running, + #[sea_orm(string_value = "success")] + Success, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] #[sea_orm( rs_type = "String", db_type = "Enum", - enum_name = "specification_versions" + enum_name = "royalty_target_type" )] -pub enum SpecificationVersions { +pub enum RoyaltyTargetType { + #[sea_orm(string_value = "creators")] + Creators, + #[sea_orm(string_value = "fanout")] + Fanout, + #[sea_orm(string_value = "single")] + Single, #[sea_orm(string_value = "unknown")] Unknown, - #[sea_orm(string_value = "v0")] - V0, - 
#[sea_orm(string_value = "v1")] - V1, - #[sea_orm(string_value = "v2")] - V2, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "chain_mutability")] -pub enum ChainMutability { +#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "mutability")] +pub enum Mutability { #[sea_orm(string_value = "immutable")] Immutable, #[sea_orm(string_value = "mutable")] @@ -164,26 +184,6 @@ pub enum ChainMutability { Unknown, } #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] -#[sea_orm( - rs_type = "String", - db_type = "Enum", - enum_name = "batch_mint_persisting_state" -)] -pub enum BatchMintPersistingState { - #[sea_orm(string_value = "failed_to_persist")] - FailedToPersist, - #[sea_orm(string_value = "received_transaction")] - ReceivedTransaction, - #[sea_orm(string_value = "start_processing")] - StartProcessing, - #[sea_orm(string_value = "stored_update")] - StoredUpdate, - #[sea_orm(string_value = "successfully_download")] - SuccessfullyDownload, - #[sea_orm(string_value = "successfully_validate")] - SuccessfullyValidate, -} -#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)] #[sea_orm( rs_type = "String", db_type = "Enum", diff --git a/digital_asset_types/tests/json_parsing.rs b/digital_asset_types/tests/json_parsing.rs index e630c91ff..ba0a3126a 100644 --- a/digital_asset_types/tests/json_parsing.rs +++ b/digital_asset_types/tests/json_parsing.rs @@ -10,7 +10,7 @@ use solana_sdk::signature::Keypair; use solana_sdk::signer::Signer; pub async fn load_test_json(file_name: &str) -> serde_json::Value { - let json = tokio::fs::read_to_string(format!("tools/data/{}", file_name)) + let json = tokio::fs::read_to_string(format!("tests/data/{}", file_name)) .await .unwrap(); serde_json::from_str(&json).unwrap() diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml index a100b83c9..c389b61a6 100644 --- a/integration_tests/Cargo.toml +++ b/integration_tests/Cargo.toml @@ -37,3 +37,6 @@ mpl-bubblegum = { workspace = true } spl-concurrent-merkle-tree = { workspace = true } tempfile = { workspace = true } async-channel = { workspace = true } +cadence = { workspace = true } +cadence-macros = { workspace = true } +bubblegum-batch-sdk = { workspace = true } \ No newline at end of file diff --git a/integration_tests/tests/integration_tests/batch_mint_tests.rs b/integration_tests/tests/integration_tests/batch_mint_tests.rs index 1c57bf958..585949579 100644 --- a/integration_tests/tests/integration_tests/batch_mint_tests.rs +++ b/integration_tests/tests/integration_tests/batch_mint_tests.rs @@ -1,11 +1,19 @@ use crate::common::TestSetup; use borsh::BorshSerialize; +use bubblegum_batch_sdk::batch_mint_client::BatchMintClient; +use bubblegum_batch_sdk::model::CollectionConfig; +use cadence::{NopMetricSink, StatsdClient}; +use cadence_macros::set_global_default; use das_api::api::ApiContract; use das_api::api::GetAssetProof; -use digital_asset_types::dao::sea_orm_active_enums::{RollupFailStatus, RollupPersistingState}; -use digital_asset_types::dao::{rollup, rollup_to_verify}; +use digital_asset_types::dao::sea_orm_active_enums::{ + BatchMintFailStatus, BatchMintPersistingState, +}; +use digital_asset_types::dao::{batch_mint, batch_mint_to_verify}; use flatbuffers::FlatBufferBuilder; -use mpl_bubblegum::types::LeafSchema; +use mpl_bubblegum::types::Collection; +use mpl_bubblegum::types::Creator; +use 
mpl_bubblegum::types::{LeafSchema, MetadataArgs}; use nft_ingester::batch_mint_updates::create_batch_mint_notification_channel; use nft_ingester::plerkle::PlerkleTransactionInfo; use plerkle_serialization::root_as_transaction_info; @@ -18,20 +26,32 @@ use program_transformers::error::BatchMintValidationError; use sea_orm::sea_query::OnConflict; use sea_orm::{ColumnTrait, ConnectionTrait, DbBackend, IntoActiveModel, QueryTrait, Set}; use sea_orm::{EntityTrait, QueryFilter}; +use solana_client::nonblocking::rpc_client::RpcClient; use solana_sdk::instruction::CompiledInstruction; use solana_sdk::keccak; use solana_sdk::message::{Message, MessageHeader}; use solana_sdk::pubkey::Pubkey; +use solana_sdk::signature::Keypair; use solana_sdk::signature::Signature; +use solana_sdk::signer::Signer; use solana_sdk::transaction::{SanitizedTransaction, Transaction}; use solana_transaction_status::{InnerInstruction, InnerInstructions, TransactionStatusMeta}; use spl_concurrent_merkle_tree::concurrent_merkle_tree::ConcurrentMerkleTree; +use std::collections::HashMap; use std::fs::File; use std::str::FromStr; +use std::sync::Arc; +use std::time::Duration; use tokio::task::JoinSet; #[tokio::test] async fn save_batch_mint_to_queue_test() { + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + let setup = TestSetup::new("save_batch_mint_to_queue_test".to_string()).await; let metadata_url = "url".to_string(); let metadata_hash = "hash".to_string(); @@ -39,7 +59,7 @@ async fn save_batch_mint_to_queue_test() { // arbitrary data let batch_mint_instruction_data = mpl_bubblegum::instructions::FinalizeTreeWithRootInstructionArgs { - rightmost_root: [1; 32], + root: [1; 32], rightmost_leaf: [1; 32], rightmost_index: 99, metadata_url: metadata_url.clone(), @@ -112,8 +132,8 @@ async fn save_batch_mint_to_queue_test() { .await .unwrap(); - let r = rollup_to_verify::Entity::find() - .filter(rollup_to_verify::Column::FileHash.eq(metadata_hash.clone())) + let r = batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) .one(setup.db.as_ref()) .await .unwrap() @@ -166,8 +186,14 @@ fn generate_merkle_tree_from_batch_mint(batch_mint: &BatchMint) -> ConcurrentMer #[tokio::test] async fn batch_mint_persister_test() { + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + let setup = TestSetup::new("batch_mint_persister_test".to_string()).await; - let test_batch_mint = generate_batch_mint(10); + let test_batch_mint = generate_batch_mint(10, false); let tmp_dir = tempfile::TempDir::new().unwrap(); let tmp_file = File::create(tmp_dir.path().join("batch-mint-10.json")).unwrap(); @@ -175,27 +201,28 @@ async fn batch_mint_persister_test() { let metadata_url = "url".to_string(); let metadata_hash = "hash".to_string(); - let batch_mint_to_verify = rollup_to_verify::ActiveModel { + let batch_mint_to_verify = batch_mint_to_verify::ActiveModel { file_hash: Set(metadata_hash.clone()), url: Set(metadata_url.clone()), created_at_slot: Set(10), signature: Set(Signature::new_unique().to_string()), staker: Set(Pubkey::default().to_bytes().to_vec()), download_attempts: Set(0), - rollup_persisting_state: Set(RollupPersistingState::ReceivedTransaction), - rollup_fail_status: Set(None), + batch_mint_persisting_state: 
Set(BatchMintPersistingState::ReceivedTransaction), + batch_mint_fail_status: Set(None), + collection: Set(None), } .into_active_model(); - let query = rollup_to_verify::Entity::insert(batch_mint_to_verify) + let query = batch_mint_to_verify::Entity::insert(batch_mint_to_verify) .on_conflict( - OnConflict::columns([rollup_to_verify::Column::FileHash]) - .update_columns([rollup_to_verify::Column::Url]) - .update_columns([rollup_to_verify::Column::Signature]) - .update_columns([rollup_to_verify::Column::DownloadAttempts]) - .update_columns([rollup_to_verify::Column::RollupFailStatus]) - .update_columns([rollup_to_verify::Column::RollupPersistingState]) - .update_columns([rollup_to_verify::Column::CreatedAtSlot]) + OnConflict::columns([batch_mint_to_verify::Column::FileHash]) + .update_columns([batch_mint_to_verify::Column::Url]) + .update_columns([batch_mint_to_verify::Column::Signature]) + .update_columns([batch_mint_to_verify::Column::DownloadAttempts]) + .update_columns([batch_mint_to_verify::Column::BatchMintFailStatus]) + .update_columns([batch_mint_to_verify::Column::BatchMintPersistingState]) + .update_columns([batch_mint_to_verify::Column::CreatedAtSlot]) .to_owned(), ) .build(DbBackend::Postgres); @@ -264,19 +291,19 @@ async fn batch_mint_persister_test() { ); assert_eq!( - rollup_to_verify::Entity::find() - .filter(rollup_to_verify::Column::FileHash.eq(metadata_hash.clone())) + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) .one(setup.db.as_ref()) .await .unwrap() .unwrap() - .rollup_persisting_state, - RollupPersistingState::StoredUpdate + .batch_mint_persisting_state, + BatchMintPersistingState::StoredUpdate ); assert_eq!( - rollup::Entity::find() - .filter(rollup::Column::FileHash.eq(metadata_hash.clone())) + batch_mint::Entity::find() + .filter(batch_mint::Column::FileHash.eq(metadata_hash.clone())) .one(setup.db.as_ref()) .await .unwrap() @@ -287,8 +314,14 @@ async fn batch_mint_persister_test() { #[tokio::test] async fn batch_mint_persister_download_fail_test() { + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + let setup = TestSetup::new("batch_mint_persister_download_fail_test".to_string()).await; - let test_batch_mint = generate_batch_mint(10); + let test_batch_mint = generate_batch_mint(10, false); let tmp_dir = tempfile::TempDir::new().unwrap(); let tmp_file = File::create(tmp_dir.path().join("batch-mint-10.json")).unwrap(); serde_json::to_writer(tmp_file, &test_batch_mint).unwrap(); @@ -296,27 +329,28 @@ async fn batch_mint_persister_download_fail_test() { let download_attempts = 0; let metadata_url = "url".to_string(); let metadata_hash = "hash".to_string(); - let batch_mint_to_verify = rollup_to_verify::ActiveModel { + let batch_mint_to_verify = batch_mint_to_verify::ActiveModel { file_hash: Set(metadata_hash.clone()), url: Set(metadata_url.clone()), created_at_slot: Set(10), signature: Set(Signature::new_unique().to_string()), staker: Set(Pubkey::default().to_bytes().to_vec()), download_attempts: Set(download_attempts), - rollup_persisting_state: Set(RollupPersistingState::ReceivedTransaction), - rollup_fail_status: Set(None), + batch_mint_persisting_state: Set(BatchMintPersistingState::ReceivedTransaction), + batch_mint_fail_status: Set(None), + collection: Set(None), } .into_active_model(); - let query = rollup_to_verify::Entity::insert(batch_mint_to_verify) + let query = 
batch_mint_to_verify::Entity::insert(batch_mint_to_verify) .on_conflict( - OnConflict::columns([rollup_to_verify::Column::FileHash]) - .update_columns([rollup_to_verify::Column::Url]) - .update_columns([rollup_to_verify::Column::Signature]) - .update_columns([rollup_to_verify::Column::DownloadAttempts]) - .update_columns([rollup_to_verify::Column::RollupFailStatus]) - .update_columns([rollup_to_verify::Column::RollupPersistingState]) - .update_columns([rollup_to_verify::Column::CreatedAtSlot]) + OnConflict::columns([batch_mint_to_verify::Column::FileHash]) + .update_columns([batch_mint_to_verify::Column::Url]) + .update_columns([batch_mint_to_verify::Column::Signature]) + .update_columns([batch_mint_to_verify::Column::DownloadAttempts]) + .update_columns([batch_mint_to_verify::Column::BatchMintFailStatus]) + .update_columns([batch_mint_to_verify::Column::BatchMintPersistingState]) + .update_columns([batch_mint_to_verify::Column::CreatedAtSlot]) .to_owned(), ) .build(DbBackend::Postgres); @@ -345,23 +379,674 @@ async fn batch_mint_persister_download_fail_test() { .await; assert_eq!( - rollup_to_verify::Entity::find() - .filter(rollup_to_verify::Column::FileHash.eq(metadata_hash.clone())) + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) + .one(setup.db.as_ref()) + .await + .unwrap() + .unwrap() + .batch_mint_persisting_state, + BatchMintPersistingState::FailedToPersist + ); + assert_eq!( + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) + .one(setup.db.as_ref()) + .await + .unwrap() + .unwrap() + .batch_mint_fail_status, + Some(BatchMintFailStatus::DownloadFailed) + ); +} + +#[tokio::test] +async fn batch_mint_with_verified_creators_test() { + // For this test it's necessary to use Solana mainnet RPC + let url = "https://api.mainnet-beta.solana.com".to_string(); + let solana_client = Arc::new(RpcClient::new_with_timeout(url, Duration::from_secs(3))); + // Merkle tree created in mainnet for testing purposes + let tree_key = Pubkey::from_str("AGMiLKtXX7PiVneM8S1KkTmCnF7X5zh6bKq4t1Mhrwpb").unwrap(); + + // First we have to create offchain Merkle tree with SDK + + let batch_mint_client = BatchMintClient::new(solana_client); + let mut batch_mint_builder = batch_mint_client + .create_batch_mint_builder(&tree_key) + .await + .unwrap(); + + let asset_creator = Keypair::new(); + let owner = Keypair::new(); + let delegate = Keypair::new(); + + let asset = MetadataArgs { + name: "Name".to_string(), + symbol: "Symbol".to_string(), + uri: "https://immutable-storage/asset/".to_string(), + seller_fee_basis_points: 0, + primary_sale_happened: false, + is_mutable: false, + edition_nonce: None, + token_standard: Some(mpl_bubblegum::types::TokenStandard::NonFungible), + collection: None, + uses: None, + token_program_version: mpl_bubblegum::types::TokenProgramVersion::Original, + creators: vec![Creator { + address: asset_creator.pubkey(), + verified: true, + share: 100, + }], + }; + + let metadata_hash_arg = batch_mint_builder + .add_asset(&owner.pubkey(), &delegate.pubkey(), &asset) + .unwrap(); + + let signature = asset_creator.sign_message(&metadata_hash_arg.get_message()); + + let mut creators_signatures = HashMap::new(); + creators_signatures.insert(asset_creator.pubkey(), signature); + + let mut message_and_signatures = HashMap::new(); + message_and_signatures.insert(metadata_hash_arg.get_nonce(), creators_signatures); + + batch_mint_builder + 
.add_signatures_for_verified_creators(message_and_signatures) + .unwrap(); + + let finalized_batch_mint = batch_mint_builder.build_batch_mint().unwrap(); + + // Offchain Merkle tree creation is finished + // Start to process it + + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + + let setup = TestSetup::new("batch_mint_with_verified_creators_test".to_string()).await; + + let tmp_dir = tempfile::TempDir::new().unwrap(); + let tmp_file = File::create(tmp_dir.path().join("batch-mint.json")).unwrap(); + serde_json::to_writer(tmp_file, &finalized_batch_mint).unwrap(); + + let download_attempts = 0; + let metadata_url = "url".to_string(); + let metadata_hash = "hash".to_string(); + let batch_mint_to_verify = batch_mint_to_verify::ActiveModel { + file_hash: Set(metadata_hash.clone()), + url: Set(metadata_url.clone()), + created_at_slot: Set(10), + signature: Set(Signature::new_unique().to_string()), + staker: Set(Pubkey::default().to_bytes().to_vec()), + download_attempts: Set(download_attempts), + batch_mint_persisting_state: Set(BatchMintPersistingState::ReceivedTransaction), + batch_mint_fail_status: Set(None), + collection: Set(None), + } + .into_active_model(); + + let query = batch_mint_to_verify::Entity::insert(batch_mint_to_verify) + .on_conflict( + OnConflict::columns([batch_mint_to_verify::Column::FileHash]) + .update_columns([batch_mint_to_verify::Column::Url]) + .update_columns([batch_mint_to_verify::Column::Signature]) + .update_columns([batch_mint_to_verify::Column::DownloadAttempts]) + .update_columns([batch_mint_to_verify::Column::BatchMintFailStatus]) + .update_columns([batch_mint_to_verify::Column::BatchMintPersistingState]) + .update_columns([batch_mint_to_verify::Column::CreatedAtSlot]) + .to_owned(), + ) + .build(DbBackend::Postgres); + setup.db.execute(query).await.unwrap(); + + let mut mocked_downloader = MockBatchMintDownloader::new(); + mocked_downloader + .expect_download_batch_mint_and_check_checksum() + .returning(move |_, _| { + let json_file = + std::fs::read_to_string(tmp_dir.path().join("batch-mint.json")).unwrap(); + Ok(Box::new(serde_json::from_str(&json_file).unwrap())) + }); + + let mut tasks = JoinSet::new(); + let r = create_batch_mint_notification_channel(&setup.database_test_url, &mut tasks) + .await + .unwrap(); + let batch_mint_persister = BatchMintPersister::new(setup.db.clone(), r, mocked_downloader); + let (batch_mint_to_verify, _) = batch_mint_persister + .get_batch_mint_to_verify() + .await + .unwrap(); + batch_mint_persister + .persist_batch_mint(batch_mint_to_verify.unwrap(), None) + .await; + + assert_eq!( + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) + .one(setup.db.as_ref()) + .await + .unwrap() + .unwrap() + .batch_mint_persisting_state, + BatchMintPersistingState::StoredUpdate + ); + + assert_eq!( + batch_mint::Entity::find() + .filter(batch_mint::Column::FileHash.eq(metadata_hash.clone())) + .one(setup.db.as_ref()) + .await + .unwrap() + .is_some(), + true + ); +} + +#[tokio::test] +async fn batch_mint_with_unverified_creators_test() { + let setup = TestSetup::new("batch_mint_with_unverified_creators_test".to_string()).await; + // generate batch mint with creators verified value set to true + // but signatures will not be attached + // batch should not be saved + let test_batch_mint = generate_batch_mint(10, true); + let tmp_dir = 
tempfile::TempDir::new().unwrap(); + + let tmp_file = File::create(tmp_dir.path().join("batch-mint-10.json")).unwrap(); + serde_json::to_writer(tmp_file, &test_batch_mint).unwrap(); + + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + + let download_attempts = 0; + let metadata_url = "url".to_string(); + let metadata_hash = "hash".to_string(); + let batch_mint_to_verify = batch_mint_to_verify::ActiveModel { + file_hash: Set(metadata_hash.clone()), + url: Set(metadata_url.clone()), + created_at_slot: Set(10), + signature: Set(Signature::new_unique().to_string()), + staker: Set(Pubkey::default().to_bytes().to_vec()), + download_attempts: Set(download_attempts), + batch_mint_persisting_state: Set(BatchMintPersistingState::ReceivedTransaction), + batch_mint_fail_status: Set(None), + collection: Set(None), + } + .into_active_model(); + + let query = batch_mint_to_verify::Entity::insert(batch_mint_to_verify) + .on_conflict( + OnConflict::columns([batch_mint_to_verify::Column::FileHash]) + .update_columns([batch_mint_to_verify::Column::Url]) + .update_columns([batch_mint_to_verify::Column::Signature]) + .update_columns([batch_mint_to_verify::Column::DownloadAttempts]) + .update_columns([batch_mint_to_verify::Column::BatchMintFailStatus]) + .update_columns([batch_mint_to_verify::Column::BatchMintPersistingState]) + .update_columns([batch_mint_to_verify::Column::CreatedAtSlot]) + .to_owned(), + ) + .build(DbBackend::Postgres); + setup.db.execute(query).await.unwrap(); + + let mut mocked_downloader = MockBatchMintDownloader::new(); + mocked_downloader + .expect_download_batch_mint_and_check_checksum() + .returning(move |_, _| { + let json_file = + std::fs::read_to_string(tmp_dir.path().join("batch-mint-10.json")).unwrap(); + Ok(Box::new(serde_json::from_str(&json_file).unwrap())) + }); + + let mut tasks = JoinSet::new(); + let r = create_batch_mint_notification_channel(&setup.database_test_url, &mut tasks) + .await + .unwrap(); + let batch_mint_persister = BatchMintPersister::new(setup.db.clone(), r, mocked_downloader); + let (batch_mint_to_verify, _) = batch_mint_persister + .get_batch_mint_to_verify() + .await + .unwrap(); + batch_mint_persister + .persist_batch_mint(batch_mint_to_verify.unwrap(), None) + .await; + + assert_eq!( + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) .one(setup.db.as_ref()) .await .unwrap() .unwrap() - .rollup_persisting_state, - RollupPersistingState::FailedToPersist + .batch_mint_persisting_state, + BatchMintPersistingState::FailedToPersist ); +} + +#[tokio::test] +async fn batch_mint_with_verified_collection_test() { + // For this test it's necessary to use Solana mainnet RPC + let url = "https://api.mainnet-beta.solana.com".to_string(); + let solana_client = Arc::new(RpcClient::new_with_timeout(url, Duration::from_secs(3))); + // Merkle tree created in mainnet for testing purposes + let tree_key = Pubkey::from_str("AGMiLKtXX7PiVneM8S1KkTmCnF7X5zh6bKq4t1Mhrwpb").unwrap(); + + // First we have to create offchain Merkle tree with SDK + + let batch_mint_client = BatchMintClient::new(solana_client); + let mut batch_mint_builder = batch_mint_client + .create_batch_mint_builder(&tree_key) + .await + .unwrap(); + + let asset_creator = Keypair::new(); + let owner = Keypair::new(); + let delegate = Keypair::new(); + let collection_key = Pubkey::new_unique(); + + let collection_config = 
CollectionConfig { + collection_authority: Keypair::from_bytes(asset_creator.to_bytes().as_ref()).unwrap(), + collection_authority_record_pda: None, + collection_mint: collection_key, + collection_metadata: Pubkey::new_unique(), // doesn't matter in this case + edition_account: Pubkey::new_unique(), // doesn't matter in this case + }; + batch_mint_builder.setup_collection_config(collection_config); + + let asset = MetadataArgs { + name: "Name".to_string(), + symbol: "Symbol".to_string(), + uri: "https://immutable-storage/asset/".to_string(), + seller_fee_basis_points: 0, + primary_sale_happened: false, + is_mutable: false, + edition_nonce: None, + token_standard: Some(mpl_bubblegum::types::TokenStandard::NonFungible), + collection: Some(Collection { + verified: true, + key: collection_key, + }), + uses: None, + token_program_version: mpl_bubblegum::types::TokenProgramVersion::Original, + creators: vec![Creator { + address: asset_creator.pubkey(), + verified: false, + share: 100, + }], + }; + + let _ = batch_mint_builder + .add_asset(&owner.pubkey(), &delegate.pubkey(), &asset) + .unwrap(); + + let finalized_batch_mint = batch_mint_builder.build_batch_mint().unwrap(); + + // Offchain Merkle tree creation is finished + // Start to process it + + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + + let setup = TestSetup::new("batch_mint_with_verified_collection_test".to_string()).await; + + let tmp_dir = tempfile::TempDir::new().unwrap(); + let tmp_file = File::create(tmp_dir.path().join("batch-mint.json")).unwrap(); + serde_json::to_writer(tmp_file, &finalized_batch_mint).unwrap(); + + let download_attempts = 0; + let metadata_url = "url".to_string(); + let metadata_hash = "hash".to_string(); + let batch_mint_to_verify = batch_mint_to_verify::ActiveModel { + file_hash: Set(metadata_hash.clone()), + url: Set(metadata_url.clone()), + created_at_slot: Set(10), + signature: Set(Signature::new_unique().to_string()), + staker: Set(Pubkey::default().to_bytes().to_vec()), + download_attempts: Set(download_attempts), + batch_mint_persisting_state: Set(BatchMintPersistingState::ReceivedTransaction), + batch_mint_fail_status: Set(None), + collection: Set(Some(collection_key.to_bytes().to_vec())), + } + .into_active_model(); + + let query = batch_mint_to_verify::Entity::insert(batch_mint_to_verify) + .on_conflict( + OnConflict::columns([batch_mint_to_verify::Column::FileHash]) + .update_columns([batch_mint_to_verify::Column::Url]) + .update_columns([batch_mint_to_verify::Column::Signature]) + .update_columns([batch_mint_to_verify::Column::DownloadAttempts]) + .update_columns([batch_mint_to_verify::Column::BatchMintFailStatus]) + .update_columns([batch_mint_to_verify::Column::BatchMintPersistingState]) + .update_columns([batch_mint_to_verify::Column::CreatedAtSlot]) + .to_owned(), + ) + .build(DbBackend::Postgres); + setup.db.execute(query).await.unwrap(); + + let mut mocked_downloader = MockBatchMintDownloader::new(); + mocked_downloader + .expect_download_batch_mint_and_check_checksum() + .returning(move |_, _| { + let json_file = + std::fs::read_to_string(tmp_dir.path().join("batch-mint.json")).unwrap(); + Ok(Box::new(serde_json::from_str(&json_file).unwrap())) + }); + + let mut tasks = JoinSet::new(); + let r = create_batch_mint_notification_channel(&setup.database_test_url, &mut tasks) + .await + .unwrap(); + let batch_mint_persister = BatchMintPersister::new(setup.db.clone(), 
r, mocked_downloader); + let (batch_mint_to_verify, _) = batch_mint_persister + .get_batch_mint_to_verify() + .await + .unwrap(); + batch_mint_persister + .persist_batch_mint(batch_mint_to_verify.unwrap(), None) + .await; + + assert_eq!( + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) + .one(setup.db.as_ref()) + .await + .unwrap() + .unwrap() + .batch_mint_persisting_state, + BatchMintPersistingState::StoredUpdate + ); + + assert_eq!( + batch_mint::Entity::find() + .filter(batch_mint::Column::FileHash.eq(metadata_hash.clone())) + .one(setup.db.as_ref()) + .await + .unwrap() + .is_some(), + true + ); +} + +#[tokio::test] +async fn batch_mint_with_wrong_collection_test() { + // For this test it's necessary to use Solana mainnet RPC + let url = "https://api.mainnet-beta.solana.com".to_string(); + let solana_client = Arc::new(RpcClient::new_with_timeout(url, Duration::from_secs(3))); + // Merkle tree created in mainnet for testing purposes + let tree_key = Pubkey::from_str("AGMiLKtXX7PiVneM8S1KkTmCnF7X5zh6bKq4t1Mhrwpb").unwrap(); + + // First we have to create offchain Merkle tree with SDK + + let batch_mint_client = BatchMintClient::new(solana_client); + let mut batch_mint_builder = batch_mint_client + .create_batch_mint_builder(&tree_key) + .await + .unwrap(); + + let asset_creator = Keypair::new(); + let owner = Keypair::new(); + let delegate = Keypair::new(); + let collection_key = Pubkey::new_unique(); + + let wrong_collection_key = Pubkey::new_unique(); + + let collection_config = CollectionConfig { + collection_authority: Keypair::from_bytes(asset_creator.to_bytes().as_ref()).unwrap(), + collection_authority_record_pda: None, + collection_mint: collection_key, + collection_metadata: Pubkey::new_unique(), // doesn't matter in this case + edition_account: Pubkey::new_unique(), // doesn't matter in this case + }; + batch_mint_builder.setup_collection_config(collection_config); + + let asset = MetadataArgs { + name: "Name".to_string(), + symbol: "Symbol".to_string(), + uri: "https://immutable-storage/asset/".to_string(), + seller_fee_basis_points: 0, + primary_sale_happened: false, + is_mutable: false, + edition_nonce: None, + token_standard: Some(mpl_bubblegum::types::TokenStandard::NonFungible), + collection: Some(Collection { + verified: true, + key: collection_key, + }), + uses: None, + token_program_version: mpl_bubblegum::types::TokenProgramVersion::Original, + creators: vec![Creator { + address: asset_creator.pubkey(), + verified: false, + share: 100, + }], + }; + + let _ = batch_mint_builder + .add_asset(&owner.pubkey(), &delegate.pubkey(), &asset) + .unwrap(); + + let finalized_batch_mint = batch_mint_builder.build_batch_mint().unwrap(); + + // Offchain Merkle tree creation is finished + // Start to process it + + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + + let setup = TestSetup::new("batch_mint_with_verified_collection_test".to_string()).await; + + let tmp_dir = tempfile::TempDir::new().unwrap(); + let tmp_file = File::create(tmp_dir.path().join("batch-mint.json")).unwrap(); + serde_json::to_writer(tmp_file, &finalized_batch_mint).unwrap(); + + let download_attempts = 0; + let metadata_url = "url".to_string(); + let metadata_hash = "hash".to_string(); + let batch_mint_to_verify = batch_mint_to_verify::ActiveModel { + file_hash: Set(metadata_hash.clone()), + url: 
Set(metadata_url.clone()), + created_at_slot: Set(10), + signature: Set(Signature::new_unique().to_string()), + staker: Set(Pubkey::default().to_bytes().to_vec()), + download_attempts: Set(download_attempts), + batch_mint_persisting_state: Set(BatchMintPersistingState::ReceivedTransaction), + batch_mint_fail_status: Set(None), + collection: Set(Some(wrong_collection_key.to_bytes().to_vec())), + } + .into_active_model(); + + let query = batch_mint_to_verify::Entity::insert(batch_mint_to_verify) + .on_conflict( + OnConflict::columns([batch_mint_to_verify::Column::FileHash]) + .update_columns([batch_mint_to_verify::Column::Url]) + .update_columns([batch_mint_to_verify::Column::Signature]) + .update_columns([batch_mint_to_verify::Column::DownloadAttempts]) + .update_columns([batch_mint_to_verify::Column::BatchMintFailStatus]) + .update_columns([batch_mint_to_verify::Column::BatchMintPersistingState]) + .update_columns([batch_mint_to_verify::Column::CreatedAtSlot]) + .to_owned(), + ) + .build(DbBackend::Postgres); + setup.db.execute(query).await.unwrap(); + + let mut mocked_downloader = MockBatchMintDownloader::new(); + mocked_downloader + .expect_download_batch_mint_and_check_checksum() + .returning(move |_, _| { + let json_file = + std::fs::read_to_string(tmp_dir.path().join("batch-mint.json")).unwrap(); + Ok(Box::new(serde_json::from_str(&json_file).unwrap())) + }); + + let mut tasks = JoinSet::new(); + let r = create_batch_mint_notification_channel(&setup.database_test_url, &mut tasks) + .await + .unwrap(); + let batch_mint_persister = BatchMintPersister::new(setup.db.clone(), r, mocked_downloader); + let (batch_mint_to_verify, _) = batch_mint_persister + .get_batch_mint_to_verify() + .await + .unwrap(); + batch_mint_persister + .persist_batch_mint(batch_mint_to_verify.unwrap(), None) + .await; + + assert_eq!( + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) + .one(setup.db.as_ref()) + .await + .unwrap() + .unwrap() + .batch_mint_persisting_state, + BatchMintPersistingState::FailedToPersist + ); +} + +#[tokio::test] +async fn batch_mint_with_unverified_collection_test() { + // For this test it's necessary to use Solana mainnet RPC + let url = "https://api.mainnet-beta.solana.com".to_string(); + let solana_client = Arc::new(RpcClient::new_with_timeout(url, Duration::from_secs(3))); + // Merkle tree created in mainnet for testing purposes + let tree_key = Pubkey::from_str("AGMiLKtXX7PiVneM8S1KkTmCnF7X5zh6bKq4t1Mhrwpb").unwrap(); + + // First we have to create offchain Merkle tree with SDK + + let batch_mint_client = BatchMintClient::new(solana_client); + let mut batch_mint_builder = batch_mint_client + .create_batch_mint_builder(&tree_key) + .await + .unwrap(); + + let asset_creator = Keypair::new(); + let owner = Keypair::new(); + let delegate = Keypair::new(); + let collection_key = Pubkey::new_unique(); + + let collection_config = CollectionConfig { + collection_authority: Keypair::from_bytes(asset_creator.to_bytes().as_ref()).unwrap(), + collection_authority_record_pda: None, + collection_mint: collection_key, + collection_metadata: Pubkey::new_unique(), // doesn't matter in this case + edition_account: Pubkey::new_unique(), // doesn't matter in this case + }; + batch_mint_builder.setup_collection_config(collection_config); + + let asset = MetadataArgs { + name: "Name".to_string(), + symbol: "Symbol".to_string(), + uri: "https://immutable-storage/asset/".to_string(), + seller_fee_basis_points: 0, + primary_sale_happened: 
false, + is_mutable: false, + edition_nonce: None, + token_standard: Some(mpl_bubblegum::types::TokenStandard::NonFungible), + collection: Some(Collection { + verified: true, + key: collection_key, + }), + uses: None, + token_program_version: mpl_bubblegum::types::TokenProgramVersion::Original, + creators: vec![Creator { + address: asset_creator.pubkey(), + verified: false, + share: 100, + }], + }; + + let _ = batch_mint_builder + .add_asset(&owner.pubkey(), &delegate.pubkey(), &asset) + .unwrap(); + + let finalized_batch_mint = batch_mint_builder.build_batch_mint().unwrap(); + + // Offchain Merkle tree creation is finished + // Start to process it + + let client = StatsdClient::builder("batch_mint.test", NopMetricSink) + .with_error_handler(|e| eprintln!("metric error: {}", e)) + .build(); + + set_global_default(client); + + let setup = TestSetup::new("batch_mint_with_verified_collection_test".to_string()).await; + + let tmp_dir = tempfile::TempDir::new().unwrap(); + let tmp_file = File::create(tmp_dir.path().join("batch-mint.json")).unwrap(); + serde_json::to_writer(tmp_file, &finalized_batch_mint).unwrap(); + + let download_attempts = 0; + let metadata_url = "url".to_string(); + let metadata_hash = "hash".to_string(); + let batch_mint_to_verify = batch_mint_to_verify::ActiveModel { + file_hash: Set(metadata_hash.clone()), + url: Set(metadata_url.clone()), + created_at_slot: Set(10), + signature: Set(Signature::new_unique().to_string()), + staker: Set(Pubkey::default().to_bytes().to_vec()), + download_attempts: Set(download_attempts), + batch_mint_persisting_state: Set(BatchMintPersistingState::ReceivedTransaction), + batch_mint_fail_status: Set(None), + collection: Set(None), + } + .into_active_model(); + + let query = batch_mint_to_verify::Entity::insert(batch_mint_to_verify) + .on_conflict( + OnConflict::columns([batch_mint_to_verify::Column::FileHash]) + .update_columns([batch_mint_to_verify::Column::Url]) + .update_columns([batch_mint_to_verify::Column::Signature]) + .update_columns([batch_mint_to_verify::Column::DownloadAttempts]) + .update_columns([batch_mint_to_verify::Column::BatchMintFailStatus]) + .update_columns([batch_mint_to_verify::Column::BatchMintPersistingState]) + .update_columns([batch_mint_to_verify::Column::CreatedAtSlot]) + .to_owned(), + ) + .build(DbBackend::Postgres); + setup.db.execute(query).await.unwrap(); + + let mut mocked_downloader = MockBatchMintDownloader::new(); + mocked_downloader + .expect_download_batch_mint_and_check_checksum() + .returning(move |_, _| { + let json_file = + std::fs::read_to_string(tmp_dir.path().join("batch-mint.json")).unwrap(); + Ok(Box::new(serde_json::from_str(&json_file).unwrap())) + }); + + let mut tasks = JoinSet::new(); + let r = create_batch_mint_notification_channel(&setup.database_test_url, &mut tasks) + .await + .unwrap(); + let batch_mint_persister = BatchMintPersister::new(setup.db.clone(), r, mocked_downloader); + let (batch_mint_to_verify, _) = batch_mint_persister + .get_batch_mint_to_verify() + .await + .unwrap(); + batch_mint_persister + .persist_batch_mint(batch_mint_to_verify.unwrap(), None) + .await; + assert_eq!( - rollup_to_verify::Entity::find() - .filter(rollup_to_verify::Column::FileHash.eq(metadata_hash.clone())) + batch_mint_to_verify::Entity::find() + .filter(batch_mint_to_verify::Column::FileHash.eq(metadata_hash.clone())) .one(setup.db.as_ref()) .await .unwrap() .unwrap() - .rollup_fail_status, - Some(RollupFailStatus::DownloadFailed) + .batch_mint_persisting_state, + 
BatchMintPersistingState::FailedToPersist ); } diff --git a/migration/src/m20240720_120101_add_finalize_tree_with_root_instruction_handle.rs b/migration/src/m20240720_120101_add_finalize_tree_with_root_instruction_handle.rs index 32c6b6cf5..8e491675b 100644 --- a/migration/src/m20240720_120101_add_finalize_tree_with_root_instruction_handle.rs +++ b/migration/src/m20240720_120101_add_finalize_tree_with_root_instruction_handle.rs @@ -68,6 +68,11 @@ impl MigrationTrait for Migration { .binary() .not_null(), ) + .col( + ColumnDef::new(BatchMintToVerify::Collection) + .binary() + .null(), + ) .col( ColumnDef::new(BatchMintToVerify::DownloadAttempts) .unsigned() @@ -174,6 +179,7 @@ enum BatchMintToVerify { BatchMintPersistingState, BatchMintFailStatus, Staker, + Collection, } #[derive(Iden, Debug, PartialEq, Sequence)] diff --git a/program_transformers/Cargo.toml b/program_transformers/Cargo.toml index 94929bbd3..86b25f22d 100644 --- a/program_transformers/Cargo.toml +++ b/program_transformers/Cargo.toml @@ -40,6 +40,8 @@ serde_with = { workspace = true } bincode = { workspace = true } rand = { workspace = true } async-channel = { workspace = true } +bubblegum-batch-sdk = { workspace = true } +solana-client = { workspace = true } [lints] workspace = true diff --git a/program_transformers/src/batch_minting/batch_mint_persister.rs b/program_transformers/src/batch_minting/batch_mint_persister.rs index 28f2d07e7..33b73810e 100644 --- a/program_transformers/src/batch_minting/batch_mint_persister.rs +++ b/program_transformers/src/batch_minting/batch_mint_persister.rs @@ -1,5 +1,7 @@ use anchor_lang::AnchorSerialize; use async_channel::Receiver; +use bubblegum_batch_sdk::batch_mint_builder::{verify_signature, MetadataArgsHash}; +use solana_sdk::signature::Signature; use std::collections::HashMap; use std::sync::Arc; @@ -26,6 +28,7 @@ use sea_orm::{ }; use serde::{Deserialize, Serialize}; use serde_json::value::RawValue; +use serde_with::DisplayFromStr; use solana_sdk::keccak; use solana_sdk::keccak::Hash; use solana_sdk::pubkey::Pubkey; @@ -55,6 +58,8 @@ pub struct BatchedMintInstruction { pub mint_args: MetadataArgs, #[serde(with = "serde_with::As::")] pub authority: Pubkey, + #[serde(with = "serde_with::As::<Option<HashMap<DisplayFromStr, DisplayFromStr>>>")] + pub creator_signature: Option<HashMap<Pubkey, Signature>>, // signatures of the asset with the creator pubkey to ensure verified creator } #[derive(Default, Clone)] @@ -339,6 +344,7 @@ impl BatchMintPer download_attempts: Set(r.download_attempts), batch_mint_persisting_state: Set(BatchMintPersistingState::StartProcessing), batch_mint_fail_status: Set(r.batch_mint_fail_status.clone()), + collection: Set(r.collection.clone()), }) .filter(batch_mint_to_verify::Column::FileHash.eq(r.file_hash.clone())) .exec(&multi_txn) @@ -440,6 +446,7 @@ impl BatchMintPer batch_mint_fail_status: Set(batch_mint_to_verify .batch_mint_fail_status .clone()), + collection: Set(batch_mint_to_verify.collection.clone()), } .insert(self.txn.as_ref())) .await @@ -458,7 +465,9 @@ impl BatchMintPer batch_mint_to_verify: &mut batch_mint_to_verify::Model, batch_mint: &BatchMint, ) { - if let Err(e) = validate_batch_mint(batch_mint).await { + if let Err(e) = + validate_batch_mint(batch_mint, batch_mint_to_verify.collection.clone()).await + { error!("Error while validating batch mint: {}", e.to_string()); statsd_count!("batch_mint.validating_fail", 1); @@ -518,6 +527,7 @@ impl BatchMintPer download_attempts: Set(batch_mint.download_attempts), batch_mint_persisting_state: Set(batch_mint.batch_mint_persisting_state.clone()), batch_mint_fail_status:
Set(Some(status)), + collection: Set(batch_mint.collection.clone()), }) .filter(batch_mint_to_verify::Column::FileHash.eq(batch_mint.file_hash.clone())) .exec(self.txn.as_ref()) @@ -545,9 +555,39 @@ impl BatchMintPer } } -pub async fn validate_batch_mint(batch_mint: &BatchMint) -> Result<(), BatchMintValidationError> { +pub async fn validate_batch_mint( + batch_mint: &BatchMint, + collection_mint: Option<Vec<u8>>, +) -> Result<(), BatchMintValidationError> { let mut leaf_hashes = Vec::new(); for asset in batch_mint.batch_mints.iter() { + verify_creators_signatures( + &batch_mint.tree_id, + asset, + asset.creator_signature.clone().unwrap_or_default(), + )?; + + if let Some(ref collection) = asset.mint_args.collection { + match &collection_mint { + None => { + if collection.verified { + return Err(BatchMintValidationError::WrongCollectionVerified( + collection.key.to_string(), + )); + } + } + Some(collection_mint) => { + if collection.verified && collection_mint != collection.key.to_bytes().as_ref() + { + return Err(BatchMintValidationError::VerifiedCollectionMismatch( + bs58::encode(collection_mint.clone()).into_string(), + collection.key.to_string(), + )); + } + } + } + } + let leaf_hash = match get_leaf_hash(asset, &batch_mint.tree_id) { Ok(leaf_hash) => leaf_hash, Err(e) => { @@ -565,6 +605,34 @@ pub async fn validate_batch_mint(batch_mint: &BatchMint) -> Result<(), BatchMint ) } +// TODO: move this func to SDK once this crate will import data types from SDK +fn verify_creators_signatures( + tree_key: &Pubkey, + rolled_mint: &BatchedMintInstruction, + creator_signatures: HashMap<Pubkey, Signature>, +) -> Result<(), BatchMintValidationError> { + let metadata_hash = + MetadataArgsHash::new(&rolled_mint.leaf_update, tree_key, &rolled_mint.mint_args); + + for creator in &rolled_mint.mint_args.creators { + if creator.verified { + if let Some(signature) = creator_signatures.get(&creator.address) { + if !verify_signature(&creator.address, &metadata_hash.get_message(), signature) { + return Err(BatchMintValidationError::FailedCreatorVerification( + creator.address.to_string(), + )); + } + } else { + return Err(BatchMintValidationError::MissingCreatorSignature( + creator.address.to_string(), + )); + } + } + } + + Ok(()) +} + fn get_leaf_hash( asset: &BatchedMintInstruction, tree_id: &Pubkey, ) -> Result @@ -639,7 +707,7 @@ where slot, }, txn, - "CreateTreeWithRoot", + "FinalizeTreeWithRoot", false, ) .await?; diff --git a/program_transformers/src/batch_minting/tests.rs b/program_transformers/src/batch_minting/tests.rs index 68e3b1d14..6fe0dcabf 100644 --- a/program_transformers/src/batch_minting/tests.rs +++ b/program_transformers/src/batch_minting/tests.rs @@ -11,7 +11,7 @@ use spl_concurrent_merkle_tree::concurrent_merkle_tree::ConcurrentMerkleTree; use std::collections::HashMap; use std::str::FromStr; -pub fn generate_batch_mint(size: usize) -> BatchMint { +pub fn generate_batch_mint(size: usize, creators_verified: bool) -> BatchMint { let authority = Pubkey::from_str("3VvLDXqJbw3heyRwFxv8MmurPznmDVUJS9gPMX2BDqfM").unwrap(); let tree = Pubkey::from_str("HxhCw9g3kZvrdg9zZvctmh6qpSDg1FfsBXfFvRkbCHB7").unwrap(); let mut mints = Vec::new(); @@ -66,7 +66,7 @@ pub fn generate_batch_mint(size: usize) -> BatchMint { creators: (0..thread_rng().sample(rand::distributions::Uniform::new(1, 5))) .map(|_| mpl_bubblegum::types::Creator { address: Pubkey::new_unique(), - verified: false, + verified: creators_verified, share: thread_rng().sample(rand::distributions::Uniform::new(0, 100)), }) .collect(), @@ -141,6 +141,7 @@ pub fn
             },
             mint_args,
             authority,
+            creator_signature: None,
         };
         mints.push(rolled_mint);
     }
@@ -159,16 +160,16 @@ pub fn generate_batch_mint(size: usize) -> BatchMint {
 
 #[tokio::test]
 async fn batch_mint_validation_test() {
-    let mut batch_mint = generate_batch_mint(1000);
+    let mut batch_mint = generate_batch_mint(1000, false);
 
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
     assert_eq!(validation_result, Ok(()));
 
     let old_root = batch_mint.merkle_root;
     let new_root = Pubkey::new_unique();
     batch_mint.merkle_root = new_root.to_bytes();
 
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
     assert_eq!(
         validation_result,
         Err(BatchMintValidationError::InvalidRoot(
@@ -189,7 +190,7 @@ async fn batch_mint_validation_test() {
         data_hash: new_leaf_data_hash.to_bytes(),
         creator_hash: batch_mint.batch_mints[leaf_idx].leaf_update.creator_hash(),
     };
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
 
     assert_eq!(
         validation_result,
@@ -210,7 +211,7 @@ async fn batch_mint_validation_test() {
     let old_tree_depth = batch_mint.max_depth;
     let new_tree_depth = 100;
     batch_mint.max_depth = new_tree_depth;
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
 
     assert_eq!(
         validation_result,
@@ -231,7 +232,7 @@ async fn batch_mint_validation_test() {
         data_hash: batch_mint.batch_mints[leaf_idx].leaf_update.data_hash(),
         creator_hash: batch_mint.batch_mints[leaf_idx].leaf_update.creator_hash(),
     };
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
 
     assert_eq!(
         validation_result,
@@ -260,7 +261,7 @@ async fn batch_mint_validation_test() {
         .collect::<Vec<_>>();
     let new_path = Vec::new();
     batch_mint.batch_mints[leaf_idx].tree_update.path = new_path;
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
 
     assert_eq!(
         validation_result,
@@ -276,7 +277,7 @@ async fn batch_mint_validation_test() {
     let old_tree_id = batch_mint.batch_mints[leaf_idx].tree_update.id;
     let new_tree_id = Pubkey::new_unique();
     batch_mint.batch_mints[leaf_idx].tree_update.id = new_tree_id;
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
 
     assert_eq!(
         validation_result,
@@ -294,7 +295,7 @@ async fn batch_mint_validation_test() {
     let old_index = batch_mint.batch_mints[leaf_idx].tree_update.index;
     let new_index = 1;
     batch_mint.batch_mints[leaf_idx].tree_update.index = new_index;
-    let validation_result = validate_batch_mint(&batch_mint).await;
+    let validation_result = validate_batch_mint(&batch_mint, None).await;
 
     assert_eq!(
         validation_result,
diff --git a/program_transformers/src/bubblegum/finalize_tree_with_root.rs b/program_transformers/src/bubblegum/finalize_tree_with_root.rs
index 124dc59e6..5b2cbab48 100644
--- a/program_transformers/src/bubblegum/finalize_tree_with_root.rs
+++ b/program_transformers/src/bubblegum/finalize_tree_with_root.rs
@@ -16,7 +16,7 @@ pub async fn finalize_tree_with_root<'c, T>(
 where
     T: ConnectionTrait + TransactionTrait,
 {
-    if let Some(Payload::CreateTreeWithRoot { args, .. }) = &parsing_result.payload {
+    if let Some(Payload::FinalizeTreeWithRoot { args, .. }) = &parsing_result.payload {
         let query = digital_asset_types::dao::batch_mint_to_verify::Entity::insert(
             digital_asset_types::dao::batch_mint_to_verify::ActiveModel {
                 file_hash: Set(args.metadata_hash.clone()),
@@ -27,6 +27,7 @@ where
                 download_attempts: Set(0),
                 batch_mint_persisting_state: Set(BatchMintPersistingState::ReceivedTransaction),
                 batch_mint_fail_status: Set(None),
+                collection: Set(args.collection_mint.map(|k| k.to_bytes().to_vec())),
             },
         )
         .on_conflict(
diff --git a/program_transformers/src/bubblegum/mod.rs b/program_transformers/src/bubblegum/mod.rs
index fd18a305f..aadf692a5 100644
--- a/program_transformers/src/bubblegum/mod.rs
+++ b/program_transformers/src/bubblegum/mod.rs
@@ -59,7 +59,10 @@ where
         InstructionName::SetAndVerifyCollection => "SetAndVerifyCollection",
         InstructionName::SetDecompressibleState => "SetDecompressibleState",
         InstructionName::UpdateMetadata => "UpdateMetadata",
-        InstructionName::CreateTreeWithRoot => "CreateTreeWithRoot",
+        InstructionName::PrepareTree => "PrepareTree",
+        InstructionName::AddCanopy => "AddCanopy",
+        InstructionName::FinalizeTreeWithRoot => "FinalizeTreeWithRoot",
+        InstructionName::FinalizeTreeWithRootAndCollection => "FinalizeTreeWithRootAndCollection",
     };
     info!("BGUM instruction txn={:?}: {:?}", ix_str, bundle.txn_id);
 
@@ -111,7 +114,8 @@ where
                 .map_err(ProgramTransformerError::DownloadMetadataNotify)?;
             }
         }
-        InstructionName::CreateTreeWithRoot => {
+        InstructionName::FinalizeTreeWithRoot
+        | InstructionName::FinalizeTreeWithRootAndCollection => {
             finalize_tree_with_root::finalize_tree_with_root(parsing_result, bundle, txn).await?
         }
         _ => debug!("Bubblegum: Not Implemented Instruction"),
diff --git a/program_transformers/src/error.rs b/program_transformers/src/error.rs
index 3b804e286..5193b51f8 100644
--- a/program_transformers/src/error.rs
+++ b/program_transformers/src/error.rs
@@ -70,6 +70,14 @@ pub enum BatchMintValidationError {
     Serialization(String),
     #[error("Reqwest: {0}")]
     Reqwest(String),
+    #[error("WrongCollectionVerified: {0}")]
+    WrongCollectionVerified(String),
+    #[error("VerifiedCollectionMismatch: expected :{0}, got :{1}")]
+    VerifiedCollectionMismatch(String, String),
+    #[error("Failed creator's signature verification: {0}")]
+    FailedCreatorVerification(String),
+    #[error("Missing creator's signature in batch mint: {0}")]
+    MissingCreatorSignature(String),
 }
 
 impl From for BatchMintValidationError {
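
For illustration only, not part of the patch: a minimal sketch of how a creator signature accepted by the new verify_creators_signatures() path could be produced off-chain. It relies only on the MetadataArgsHash and verify_signature API already imported above from bubblegum-batch-sdk; sign_and_attach() is a hypothetical helper, and the import path for BatchedMintInstruction is assumed.

// Sketch under the assumptions stated above; not a definitive implementation.
use std::collections::HashMap;

use bubblegum_batch_sdk::batch_mint_builder::{verify_signature, MetadataArgsHash};
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::Signature;
use solana_sdk::signer::{keypair::Keypair, Signer};

// Assumed path to the struct extended in batch_mint_persister.rs above.
use program_transformers::batch_minting::batch_mint_persister::BatchedMintInstruction;

// Hypothetical helper: signs one batch-minted asset on behalf of a verified creator
// and attaches the signature where the persister expects to find it.
fn sign_and_attach(creator: &Keypair, tree_key: &Pubkey, rolled_mint: &mut BatchedMintInstruction) {
    // Hash the leaf update and metadata args exactly as the validator does.
    let metadata_hash =
        MetadataArgsHash::new(&rolled_mint.leaf_update, tree_key, &rolled_mint.mint_args);

    // The creator signs the canonical message; validate_batch_mint() later checks this
    // for every creator whose `verified` flag is set.
    let signature: Signature = creator.sign_message(&metadata_hash.get_message());

    // Sanity check mirroring the new validation path.
    debug_assert!(verify_signature(
        &creator.pubkey(),
        &metadata_hash.get_message(),
        &signature
    ));

    rolled_mint
        .creator_signature
        .get_or_insert_with(HashMap::new)
        .insert(creator.pubkey(), signature);
}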