Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updating test workflow #124

Merged
merged 6 commits into from
Oct 2, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
128 changes: 109 additions & 19 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
@@ -1,31 +1,121 @@
name: Build & Test
name: Build das api components
# This workflow uses github runners.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

# This may be adjusted to whatever suits best your runners config.
# Current config will build on manual trigger or pull-request (each push)
on:
push:
# pull_request can be removed, to save minutes on github runners
Juanito87 marked this conversation as resolved.
Show resolved Hide resolved
pull_request:
workflow_dispatch:
push:
branches:
- main
- 'main'

env:
RUST_TOOLCHAIN: stable
CARGO_TERM_COLOR: always

jobs:
build-api:
runs-on: buildjet-4vcpu-ubuntu-2204
steps:
- uses: actions/checkout@v3
- name: Build
run: |
cd das_api
cargo build
shell: bash
build-ingest:
runs-on: buildjet-8vcpu-ubuntu-2204
strategy:
matrix:
os: [ubuntu-20.04, ubuntu-22.04]
# This can also be run on self-hosted GitHub runners
runs-on: ["${{ matrix.os }}"]

steps:
- uses: actions/checkout@v3
- name: Build
- name: checkout repo
uses: actions/checkout@v3
# This step can be omitted to save storage space on the organization account
# The build process will take longer without the cache
- name: set build cache
uses: actions/cache@v3
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
digital-asset-rpc-infrastructure/target/
key: ${{ matrix.os }}_digital-asset-rpc-infrastructure_${{ hashFiles('digital-asset-rpc-infrastructure/Cargo.lock') }}
restore-keys: |
${{ matrix.os }}_digital-asset-rpc-infrastructure

- name: build digital asset rpc infra
run: cargo build --verbose --release

- name: build das_api
working-directory: das_api
run: cargo build --verbose --release

- name: build migration
working-directory: migration
run: cargo build --verbose --release

- name: rename binaries for ubuntu22 release
if: matrix.os == 'ubuntu-22.04'
run: |
cd nft_ingester
cargo build
shell: bash
mv target/release/nft_ingester target/release/nft_ingester22
mv target/release/fetch_trees target/release/fetch_trees22
mv das_api/target/release/das_api das_api/target/release/das_api22
mv migration/target/release/migration migration/target/release/migration22

# These steps can be omitted to save space; they are mostly in place to validate binaries (manually) and the path to them
# Omitting this will save on storage consumption on the account
- name: Publish artifact
if: matrix.os == 'ubuntu-22.04'
uses: actions/[email protected]
with:
name: nft_ingester22
path: target/release/nft_ingester22

- name: Publish artifact
if: matrix.os == 'ubuntu-22.04'
uses: actions/[email protected]
with:
name: das_api22
path: das_api/target/release/das_api22

- name: Publish artifact
if: matrix.os == 'ubuntu-22.04'
uses: actions/[email protected]
with:
name: migration22
path: migration/target/release/migration22

- name: Publish artifact
if: matrix.os == 'ubuntu-22.04'
uses: actions/[email protected]
with:
name: fetch-trees22
path: target/release/fetch_trees22

- name: Publish artifact
if: matrix.os == 'ubuntu-20.04'
uses: actions/[email protected]
with:
name: nft_ingester
path: target/release/nft_ingester

- name: Publish artifact
if: matrix.os == 'ubuntu-20.04'
uses: actions/[email protected]
with:
name: das_api
path: das_api/target/release/das_api

- name: Publish artifact
if: matrix.os == 'ubuntu-20.04'
uses: actions/[email protected]
with:
name: migration
path: migration/target/release/migration

- name: Publish artifact
if: matrix.os == 'ubuntu-20.04'
uses: actions/[email protected]
with:
name: fetch-trees
path: target/release/fetch_trees
38 changes: 20 additions & 18 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ members = [
"nft_ingester",
"tools/acc_forwarder",
"tools/bgtask_creator",
"tools/fetch_trees",
"tools/load_generation",
"tools/tree-status",
"tools/txn_forwarder",
Expand Down
1 change: 1 addition & 0 deletions migration/src/m20230918_182123_add_raw_name_symbol.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use digital_asset_types::dao::asset_data;
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
Expand Down
17 changes: 17 additions & 0 deletions tools/fetch_trees/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
[package]
name = "fetch_trees"
version = "0.7.12"
edition = "2021"
publish = false

[dependencies]
anyhow = "1.0.70"
async-trait = "0.1.53"
borsh = "0.9.1"
clap = { version = "4.2.2", features = ["derive", "cargo"] }
mpl-bubblegum = { git = "https://github.com/metaplex-foundation/mpl-bubblegum.git", rev = "3cb3976d", features = ["no-entrypoint"] }
solana-account-decoder = "~1.14"
solana-client = "~1.14"
solana-sdk = "~1.14"
spl-account-compression = "0.1.8"
tokio = { version = "1.26.0", features = ["full", "tracing"] }
88 changes: 88 additions & 0 deletions tools/fetch_trees/src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
use {
borsh::{BorshDeserialize, BorshSerialize},
clap::Parser,
solana_account_decoder::UiAccountEncoding,
solana_client::{
nonblocking::rpc_client::RpcClient,
rpc_config::{RpcAccountInfoConfig, RpcProgramAccountsConfig},
rpc_filter::{Memcmp, RpcFilterType},
rpc_request::MAX_MULTIPLE_ACCOUNTS,
},
solana_sdk::{
account::Account,
pubkey::{Pubkey, PUBKEY_BYTES},
},
spl_account_compression::state::{ConcurrentMerkleTreeHeader, ConcurrentMerkleTreeHeaderData},
};

/// Command-line arguments for the fetch_trees tool.
#[derive(Debug, Parser)]
struct Args {
    /// Solana RPC endpoint
    // NOTE: this must be a `///` doc comment (not `//`) so clap-derive
    // picks it up as the `--help` description for the flag.
    #[arg(long, short)]
    rpc: String,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
let args = Args::parse();

let client = RpcClient::new(args.rpc);

// Initialized SPL Account Compression accounts
let config = RpcProgramAccountsConfig {
filters: Some(vec![RpcFilterType::Memcmp(Memcmp::new_raw_bytes(
0,
vec![1u8],
))]),
account_config: RpcAccountInfoConfig {
encoding: Some(UiAccountEncoding::Base64),
..Default::default()
},
..Default::default()
};
let accounts: Vec<(Pubkey, Account)> = client
.get_program_accounts_with_config(&spl_account_compression::id(), config)
.await?;
println!("Received {} accounts", accounts.len());

// Trying to extract authority pubkey
let accounts = accounts
.into_iter()
.filter_map(|(pubkey, account)| {
get_authority(&account.data)
.ok()
.map(|authority| (pubkey, authority))
})
.collect::<Vec<_>>();
println!("Successfully parsed {} accounts", accounts.len());

// Print only accounts where authority owner is bubblegum
let mut id = 1;
for accounts in accounts.chunks(MAX_MULTIPLE_ACCOUNTS) {
let pubkeys = accounts
.iter()
.map(|(_pubkey, authority)| *authority)
.collect::<Vec<_>>();
let authority_accounts = client.get_multiple_accounts(pubkeys.as_slice()).await?;
for (authority_account, (pubkey, _authority)) in authority_accounts.iter().zip(accounts) {
if let Some(account) = authority_account {
if account.owner == mpl_bubblegum::id() {
println!("{} {}", id, pubkey);
id += 1;
}
}
}
}

Ok(())
}

/// Extract the tree-authority pubkey from a raw concurrent-merkle-tree
/// account data buffer. Returns an error if the header fails to deserialize.
fn get_authority(mut data: &[u8]) -> anyhow::Result<Pubkey> {
    // Deserializing validates the header bytes before we slice into them.
    let header = ConcurrentMerkleTreeHeader::deserialize(&mut data)?;
    let ConcurrentMerkleTreeHeaderData::V1(v1) = header.header;
    let bytes = v1.try_to_vec()?;

    // Skip two leading 4-byte fields of the serialized V1 header; the
    // authority pubkey follows. (Offset preserved from the original code —
    // verify against the spl-account-compression header layout.)
    const AUTHORITY_OFFSET: usize = 4 + 4;
    Pubkey::try_from(&bytes[AUTHORITY_OFFSET..AUTHORITY_OFFSET + PUBKEY_BYTES])
        .map_err(Into::into)
}
Loading