plerkle: remove (#38)
* update yanked ahash

* fmt / clippy

* remove plerkle AccountInfo

* remove plerkle TransactionInfo

* more refs

* increment the crate version for blockbuster

* fix: upgrade solana deps to 1.17

* refactor: used published version of plerkle_serialization 1.8.0

* refactor: increase version to 2.0.0 some refactoring of instruction parsing

---------

Co-authored-by: Kyle Espinola <[email protected]>
fanatid and kespinola authored Mar 8, 2024
1 parent e80fdef commit b06ad27
Showing 13 changed files with 712 additions and 790 deletions.
819 changes: 379 additions & 440 deletions Cargo.lock

Large diffs are not rendered by default.

6 changes: 6 additions & 0 deletions Cargo.toml
@@ -2,3 +2,9 @@
 members = [
     "blockbuster",
 ]
+resolver = "2"
+
+[workspace.lints.clippy]
+clone_on_ref_ptr = "deny"
+missing_const_for_fn = "deny"
+trivially_copy_pass_by_ref = "deny"
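
For context (not part of this commit's diff): these are standard Clippy lints, so denying them at the workspace level rejects patterns like the ones below. A minimal sketch of code that satisfies all three; the function names are illustrative only. Note that a workspace lint table only applies to member crates that opt in with `[lints] workspace = true`, which is not visible in the hunks shown here.

```rust
use std::sync::Arc;

// clone_on_ref_ptr: cloning through a ref-counted pointer must be explicit,
// i.e. `Arc::clone(&value)` rather than `value.clone()`.
fn share(value: &Arc<String>) -> Arc<String> {
    Arc::clone(value)
}

// trivially_copy_pass_by_ref: small Copy types are taken by value, not by `&x`.
// missing_const_for_fn: a function that could be `const` must be declared `const`.
const fn double(x: u8) -> u8 {
    x * 2
}
```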
32 changes: 16 additions & 16 deletions blockbuster/Cargo.toml
@@ -1,34 +1,34 @@
 [package]
 name = "blockbuster"
 description = "Metaplex canonical program parsers, for indexing, analytics etc...."
-version = "1.0.1"
+version = "2.0.0"
 authors = ["Metaplex Developers <[email protected]>"]
 repository = "https://github.com/metaplex-foundation/blockbuster"
 license = "AGPL-3.0"
 edition = "2021"
 readme = "../README.md"
 
 [dependencies]
-spl-account-compression = { version = "0.2.0", features = ["no-entrypoint"] }
-spl-noop = { version = "0.2.0", features = ["no-entrypoint"] }
-mpl-bubblegum = "1.2.0"
-mpl-token-metadata = { version = "4.1.1", features = ["serde"] }
-plerkle_serialization = { version = "1.6.0" }
-spl-token = { version = "4.0.0", features = ["no-entrypoint"] }
+anchor-lang = { version = "0.29.0" }
 async-trait = "0.1.57"
+borsh = "~0.10.3"
 bs58 = "0.4.0"
 lazy_static = "1.4.0"
-flatbuffers = "23.1.21"
-borsh = "~0.10.3"
-thiserror = "1.0.32"
-solana-sdk = "~1.16.11"
-anchor-lang = { version = "0.28.0"}
 log = "0.4.17"
+mpl-bubblegum = "1.2.0"
+mpl-token-metadata = { version = "4.1.1", features = ["serde"] }
+solana-sdk = "~1.17"
+solana-transaction-status = "~1.17"
+spl-account-compression = { version = "0.3.0", features = ["no-entrypoint"] }
+spl-noop = { version = "0.2.0", features = ["no-entrypoint"] }
+spl-token = { version = "4.0.0", features = ["no-entrypoint"] }
+thiserror = "1.0.32"
 
 [dev-dependencies]
+flatbuffers = "23.1.21"
+plerkle_serialization = "1.8.0"
 rand = "0.8.5"
+serde_json = "1.0.89"
+solana-client = "~1.17"
+solana-geyser-plugin-interface = "~1.17"
 spl-concurrent-merkle-tree = "0.2.0"
-solana-client = "~1.16.11"
-solana-transaction-status = "~1.16.11"
-solana-geyser-plugin-interface = "~1.16.11"
-serde_json="1.0.89"
7 changes: 0 additions & 7 deletions blockbuster/src/error.rs
@@ -1,6 +1,5 @@
 use std::io::Error;
 use thiserror::Error;
-use plerkle_serialization::error::PlerkleSerializationError;
 
 #[derive(Error, Debug)]
 pub enum BlockbusterError {
@@ -33,9 +32,3 @@ impl From<std::io::Error> for BlockbusterError {
         BlockbusterError::IOError(err.to_string())
     }
 }
-
-impl From<plerkle_serialization::error::PlerkleSerializationError> for BlockbusterError {
-    fn from(err: PlerkleSerializationError) -> Self {
-        BlockbusterError::IOError(err.to_string())
-    }
-}
158 changes: 47 additions & 111 deletions blockbuster/src/instruction.rs
@@ -1,17 +1,15 @@
-use plerkle_serialization::{CompiledInstruction, Pubkey, TransactionInfo};
+use solana_sdk::{instruction::CompiledInstruction, pubkey::Pubkey};
+use solana_transaction_status::InnerInstructions;
+use std::collections::{HashSet, VecDeque};
+
-use std::{
-    cell::RefCell,
-    collections::{HashSet, VecDeque},
-};
-
-pub type IxPair<'a> = (Pubkey, CompiledInstruction<'a>);
+pub type IxPair<'a> = (Pubkey, &'a CompiledInstruction);
 
 #[derive(Debug, Clone, Copy)]
 pub struct InstructionBundle<'a> {
     pub txn_id: &'a str,
     pub program: Pubkey,
-    pub instruction: Option<CompiledInstruction<'a>>,
-    pub inner_ix: Option<Vec<IxPair<'a>>>,
+    pub instruction: Option<&'a CompiledInstruction>,
+    pub inner_ix: Option<&'a [IxPair<'a>]>,
     pub keys: &'a [Pubkey],
     pub slot: u64,
 }
@@ -20,7 +18,7 @@ impl<'a> Default for InstructionBundle<'a> {
     fn default() -> Self {
         InstructionBundle {
             txn_id: "",
-            program: Pubkey::new(&[0; 32]),
+            program: Pubkey::new_from_array([0; 32]),
             instruction: None,
             inner_ix: None,
             keys: &[],
@@ -30,123 +28,61 @@
 }
 
 pub fn order_instructions<'a>(
-    programs: HashSet<&[u8]>,
-    transaction_info: &'a TransactionInfo<'a>,
+    programs: &HashSet<Pubkey>,
+    account_keys: &[Pubkey],
+    message_instructions: &'a [CompiledInstruction],
+    meta_inner_instructions: &'a [InnerInstructions],
 ) -> VecDeque<(IxPair<'a>, Option<Vec<IxPair<'a>>>)> {
     let mut ordered_ixs: VecDeque<(IxPair, Option<Vec<IxPair>>)> = VecDeque::new();
-    // Get outer instructions.
-    let outer_instructions = match transaction_info.outer_instructions() {
-        None => {
-            println!("outer instructions deserialization error");
-            return ordered_ixs;
-        }
-        Some(instructions) => instructions,
-    };
 
-    if transaction_info.account_keys().is_none() {
-        return ordered_ixs;
-    }
-    // Get account keys.
-    let keys = RefCell::new(
-        transaction_info
-            .account_keys()
+    // Get inner instructions.
+    for (outer_instruction_index, message_instruction) in message_instructions.iter().enumerate() {
+        let non_hoisted_inner_instruction = meta_inner_instructions
             .iter()
+            .filter_map(|ix| (ix.index == outer_instruction_index as u8).then(|| &ix.instructions))
             .flatten()
-            .collect::<Vec<_>>(),
-    );
+            .map(|inner_ix| {
+                let cix = &inner_ix.instruction;
+                (account_keys[cix.program_id_index as usize], cix)
+            })
+            .collect::<Vec<IxPair>>();
 
-    // Get inner instructions.
-    let legacy_inner_ix_list = transaction_info.inner_instructions();
-    let compiled_inner_instructions = transaction_info.compiled_inner_instructions();
-    for (outer_instruction_index, outer_instruction) in outer_instructions.iter().enumerate() {
-        let non_hoisted_inner_instruction =
-            if let Some(inner_instructions) = compiled_inner_instructions {
-                inner_instructions
-                    .iter()
-                    .filter(|x| x.index() == outer_instruction_index as u8)
-                    .flat_map(|x| {
-                        if let Some(ixes) = x.instructions() {
-                            ixes.iter()
-                                .filter_map(|ix| ix.compiled_instruction())
-                                .map(|ix| {
-                                    let kb = keys.borrow();
-                                    (*kb[ix.program_id_index() as usize], ix)
-                                })
-                                .collect::<Vec<IxPair>>()
-                        } else {
-                            Vec::new()
-                        }
-                    })
-                    .collect::<Vec<IxPair>>()
-            } else {
-                // legacy no stack height list must exist if no compiled or no processing will be done
-                let inner_instructions = legacy_inner_ix_list.unwrap();
-                inner_instructions
-                    .iter()
-                    .filter(|x| x.index() == outer_instruction_index as u8)
-                    .flat_map(|x| {
-                        if let Some(ixes) = x.instructions() {
-                            ixes.iter()
-                                .map(|ix| {
-                                    let kb = keys.borrow();
-                                    (*kb[ix.program_id_index() as usize], ix)
-                                })
-                                .collect::<Vec<IxPair>>()
-                        } else {
-                            Vec::new()
-                        }
-                    })
-                    .collect::<Vec<IxPair>>()
-            };
-
-        let hoister = non_hoisted_inner_instruction.clone();
-        let hoisted = hoist_known_programs(&programs, hoister);
-
-        for h in hoisted {
-            ordered_ixs.push_back(h);
-        }
+        let hoisted = hoist_known_programs(programs, &non_hoisted_inner_instruction);
+        ordered_ixs.extend(hoisted);
 
+        if let Some(outer_program_id) =
+            account_keys.get(message_instruction.program_id_index as usize)
+        {
-            let kb = keys.borrow();
-            let outer_ix_program_id_index = outer_instruction.program_id_index() as usize;
-            let outer_program_id = kb.get(outer_ix_program_id_index);
-            if outer_program_id.is_none() {
-                eprintln!("outer program id deserialization error");
-                continue;
-            }
-            let outer_program_id = **outer_program_id.unwrap();
-            if programs.get(outer_program_id.0.as_ref()).is_some() {
+            if programs.contains(outer_program_id) {
                 ordered_ixs.push_back((
-                    (outer_program_id, outer_instruction),
+                    (*outer_program_id, message_instruction),
                     Some(non_hoisted_inner_instruction),
                 ));
             }
+        } else {
+            eprintln!("outer program id deserialization error");
+        }
     }
     ordered_ixs
 }
 
-fn hoist_known_programs<'a, 'b>(
-    programs: &'b HashSet<&'b [u8]>,
-    instructions: Vec<(Pubkey, CompiledInstruction<'a>)>,
+fn hoist_known_programs<'a>(
+    programs: &HashSet<Pubkey>,
+    ix_pairs: &[IxPair<'a>],
 ) -> Vec<(IxPair<'a>, Option<Vec<IxPair<'a>>>)> {
-    let mut hoist = Vec::new();
-    // there must be a safe and less copy way to do this, I should only need to move CI, and copy the found nodes matching predicate on 172
-    for (index, (pid, ci)) in instructions.iter().enumerate() {
-        let clone_for_inner = instructions.clone();
-
-        if programs.get(pid.0.as_ref()).is_some() {
-            let mut inner_copy = vec![];
-            for new_inner_elem in clone_for_inner.into_iter().skip(index + 1) {
-                if pid.0 != new_inner_elem.0 .0 {
-                    inner_copy.push(new_inner_elem);
-                } else {
-                    break;
-                }
-            }
-
-            hoist.push(((*pid, *ci), Some(inner_copy)));
-        }
-    }
-    hoist
+    ix_pairs
+        .iter()
+        .enumerate()
+        .filter_map(|(index, &(pid, ci))| {
+            programs.contains(&pid).then(|| {
+                let inner_copy = ix_pairs
+                    .iter()
+                    .skip(index + 1)
+                    .take_while(|&&(inner_pid, _)| inner_pid != pid)
+                    .cloned()
+                    .collect::<Vec<IxPair<'a>>>();
+                ((pid, ci), Some(inner_copy))
+            })
+        })
+        .collect()
 }
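
For orientation (not part of the diff): with the plerkle `TransactionInfo` gone, a caller now feeds `order_instructions` plain Solana types. Below is a minimal, hypothetical sketch of invoking the new signature shown above; the wrapper function and its argument names are illustrative, and it assumes the `instruction` module stays public as in earlier releases.

```rust
use std::collections::{HashSet, VecDeque};

use blockbuster::instruction::{order_instructions, IxPair};
use solana_sdk::{instruction::CompiledInstruction, pubkey::Pubkey};
use solana_transaction_status::InnerInstructions;

// `account_keys`, `message_instructions`, and `meta_inner_instructions` are
// assumed to come from an already-decoded transaction and its status meta.
fn tracked_instructions<'a>(
    program_of_interest: Pubkey,
    account_keys: &[Pubkey],
    message_instructions: &'a [CompiledInstruction],
    meta_inner_instructions: &'a [InnerInstructions],
) -> VecDeque<(IxPair<'a>, Option<Vec<IxPair<'a>>>)> {
    let mut programs = HashSet::new();
    programs.insert(program_of_interest);

    // Each returned entry is an instruction issued by a tracked program (an
    // outer instruction, or an inner one hoisted to the top level) paired with
    // the inner instructions grouped under it, in transaction order.
    order_instructions(
        &programs,
        account_keys,
        message_instructions,
        meta_inner_instructions,
    )
}
```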
3 changes: 1 addition & 2 deletions blockbuster/src/program_handler.rs
@@ -1,7 +1,6 @@
 use crate::{
     error::BlockbusterError, instruction::InstructionBundle, programs::ProgramParseResult,
 };
-use plerkle_serialization::AccountInfo;
 use solana_sdk::pubkey::Pubkey;
 
 pub trait ParseResult: Sync + Send {
@@ -42,7 +41,7 @@ pub trait ProgramParser: Sync + Send {
     fn handles_account_updates(&self) -> bool;
     fn handle_account(
         &self,
-        account_info: &AccountInfo,
+        _account_data: &[u8],
     ) -> Result<Box<dyn ParseResult>, BlockbusterError>;
     fn handle_instruction(
         &self,
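
And for the account side (again not part of the diff): a hypothetical sketch of driving the reworked `ProgramParser::handle_account`, which now takes raw account data bytes instead of a plerkle `AccountInfo`. The helper name is illustrative, and module paths are assumed to follow the file layout above.

```rust
use blockbuster::{
    error::BlockbusterError,
    program_handler::{ParseResult, ProgramParser},
};

// `parser` is any type implementing blockbuster's `ProgramParser`; the raw
// `account_data` slice replaces the plerkle `AccountInfo` wrapper removed here.
fn parse_account(
    parser: &dyn ProgramParser,
    account_data: &[u8],
) -> Result<Option<Box<dyn ParseResult>>, BlockbusterError> {
    if !parser.handles_account_updates() {
        return Ok(None);
    }
    parser.handle_account(account_data).map(Some)
}
```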