Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add getNftEditions rpc method to das #216

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

22 changes: 20 additions & 2 deletions das_api/src/api/api_impl.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use digital_asset_types::{
dao::{
scopes::asset::get_grouping,
scopes::asset::{get_grouping, get_nft_editions},
sea_orm_active_enums::{
OwnerType, RoyaltyTargetType, SpecificationAssetClass, SpecificationVersions,
},
Expand All @@ -14,8 +14,8 @@ use digital_asset_types::{
rpc::{
filter::{AssetSortBy, SearchConditionType},
response::GetGroupingResponse,
OwnershipModel, RoyaltyModel,
},
rpc::{OwnershipModel, RoyaltyModel},
};
use open_rpc_derive::document_rpc;
use sea_orm::{sea_query::ConditionType, ConnectionTrait, DbBackend, Statement};
Expand Down Expand Up @@ -501,6 +501,7 @@ impl ApiContract for DasApi {
.await
.map_err(Into::into)
}

async fn get_grouping(
self: &DasApi,
payload: GetGrouping,
Expand All @@ -516,4 +517,21 @@ impl ApiContract for DasApi {
group_size: gs.size,
})
}

async fn get_nft_editions(
self: &DasApi,
payload: GetNftEditions,
) -> Result<NftEditions, DasApiError> {
let GetNftEditions {
mint_address,
page,
limit,
} = payload;

let mint_address = validate_pubkey(mint_address.clone())?;

get_nft_editions(&self.db_connection, mint_address, limit, page)
.await
.map_err(Into::into)
}
}
17 changes: 16 additions & 1 deletion das_api/src/api/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use crate::error::DasApiError;
use async_trait::async_trait;
use digital_asset_types::rpc::filter::{AssetSortDirection, SearchConditionType};
use digital_asset_types::rpc::options::Options;
use digital_asset_types::rpc::response::{AssetList, TransactionSignatureList};
use digital_asset_types::rpc::response::{AssetList, NftEditions, TransactionSignatureList};
use digital_asset_types::rpc::{filter::AssetSorting, response::GetGroupingResponse};
use digital_asset_types::rpc::{Asset, AssetProof, Interface, OwnershipModel, RoyaltyModel};
use open_rpc_derive::{document_rpc, rpc};
Expand Down Expand Up @@ -147,6 +147,14 @@ pub struct GetGrouping {
pub group_value: String,
}

/// Request parameters for the `getNftEditions` RPC method.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct GetNftEditions {
    // Base58-encoded mint of the master edition NFT; validated with
    // `validate_pubkey` in the handler before use.
    pub mint_address: String,
    // Page number for pagination — presumably 1-based; TODO confirm.
    pub page: Option<u32>,
    // Maximum number of editions to return per request.
    pub limit: Option<u32>,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Default)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct GetAssetSignatures {
Expand Down Expand Up @@ -251,4 +259,11 @@ pub trait ApiContract: Send + Sync + 'static {
summary = "Get a list of assets grouped by a specific authority"
)]
async fn get_grouping(&self, payload: GetGrouping) -> Result<GetGroupingResponse, DasApiError>;

#[rpc(
name = "getNftEditions",
params = "named",
summary = "Get all printable editions for a master edition NFT mint"
)]
async fn get_nft_editions(&self, payload: GetNftEditions) -> Result<NftEditions, DasApiError>;
}
10 changes: 10 additions & 0 deletions das_api/src/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,16 @@ impl RpcApiBuilder {
Ok(rpc_context.schema())
})?;

module.register_async_method("get_nft_editions", |rpc_params, rpc_context| async move {
let payload = rpc_params.parse::<GetNftEditions>()?;
rpc_context
.get_nft_editions(payload)
.await
.map_err(Into::into)
})?;

module.register_alias("getNftEditions", "get_nft_editions")?;

Ok(module)
}
}
1 change: 1 addition & 0 deletions digital_asset_types/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ spl-concurrent-merkle-tree = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros"] }
url = { workspace = true }
mpl-token-metadata = { workspace = true }

[features]
default = ["json_types", "sql_types"]
Expand Down
92 changes: 88 additions & 4 deletions digital_asset_types/src/dao/scopes/asset.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,23 @@
use crate::{
dao::{
asset::{self},
asset_authority, asset_creators, asset_data, asset_grouping, cl_audits_v2,
asset_authority, asset_creators, asset_data, asset_grouping, asset_v1_account_attachments,
cl_audits_v2,
extensions::{self, instruction::PascalCase},
sea_orm_active_enums::Instruction,
sea_orm_active_enums::{Instruction, V1AccountAttachments},
Cursor, FullAsset, GroupingSize, Pagination,
},
rpc::filter::AssetSortDirection,
rpc::{
filter::AssetSortDirection,
response::{NftEdition, NftEditions},
},
};
use indexmap::IndexMap;
use sea_orm::{entity::*, query::*, ConnectionTrait, DbErr, Order};
use mpl_token_metadata::accounts::{Edition, MasterEdition};
use sea_orm::{entity::*, query::*, sea_query::Expr, ConnectionTrait, DbErr, Order};
use serde::de::DeserializeOwned;
use serde_json::Value;
use solana_sdk::pubkey::Pubkey;
use std::collections::HashMap;

pub fn paginate<T, C>(
Expand Down Expand Up @@ -553,3 +561,79 @@ fn filter_out_stale_creators(creators: &mut Vec<asset_creators::Model>) {
}
}
}

/// Deserialize a JSON `Value` into `T`, mapping any serde failure into a
/// `DbErr::Custom` so callers can remain in the database error domain.
pub fn get_edition_data_from_json<T: DeserializeOwned>(data: Value) -> Result<T, DbErr> {
    match serde_json::from_value::<T>(data) {
        Ok(parsed) => Ok(parsed),
        Err(err) => Err(DbErr::Custom(err.to_string())),
    }
}

pub async fn get_nft_editions(
conn: &impl ConnectionTrait,
mint_address: Pubkey,
limit: Option<u32>,
page: Option<u32>,
) -> Result<NftEditions, DbErr> {
let master_edition_pubkey = MasterEdition::find_pda(&mint_address).0;

let master_edition =
asset_v1_account_attachments::Entity::find_by_id(master_edition_pubkey.to_bytes().to_vec())
.one(conn)
.await?
.ok_or(DbErr::RecordNotFound(
"Master Edition not found".to_string(),
))?;

let limit = limit.unwrap_or(10);

let master_edition_data: MasterEdition = master_edition
.data
.clone()
.ok_or(DbErr::RecordNotFound(
"Master Edition data not found".to_string(),
))
.map(get_edition_data_from_json)??;

let nft_editions = asset_v1_account_attachments::Entity::find()
.filter(
asset_v1_account_attachments::Column::AttachmentType
.eq(V1AccountAttachments::Edition)
.and(asset_v1_account_attachments::Column::Data.is_not_null())
.and(Expr::cust(&format!(
"data->>'parent' = '{}'",
master_edition_pubkey
))),
)
.order_by_asc(asset_v1_account_attachments::Column::SlotUpdated)
.all(conn)
.await?;
Nagaprasadvr marked this conversation as resolved.
Show resolved Hide resolved

let nft_editions = nft_editions
.iter()
.map(|e| -> Result<NftEdition, DbErr> {
let data: Edition = e
.data
.clone()
.ok_or(DbErr::RecordNotFound("Edition data not found".to_string()))
.map(get_edition_data_from_json)??;
Nagaprasadvr marked this conversation as resolved.
Show resolved Hide resolved

Ok(NftEdition {
mint_address: e
.asset_id
.clone()
.map(|id| bs58::encode(id).into_string())
.unwrap_or("".to_string()),
edition_number: data.edition,
edition_address: bs58::encode(e.id.clone()).into_string(),
})
})
.collect::<Result<Vec<NftEdition>, _>>()?;

Ok(NftEditions {
total: nft_editions.len() as u32,
limit,
page,
master_edition_address: master_edition_pubkey.to_string(),
supply: master_edition_data.supply,
max_supply: master_edition_data.max_supply,
editions: nft_editions,
})
}
23 changes: 23 additions & 0 deletions digital_asset_types/src/rpc/response.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,3 +50,26 @@ pub struct TransactionSignatureList {
pub after: Option<String>,
pub items: Vec<(String, String)>,
}

/// A single printable edition derived from a master edition NFT.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Default, JsonSchema)]
#[serde(default)]

pub struct NftEdition {
    // Base58-encoded mint of the edition NFT (empty string when the
    // indexed attachment has no asset id).
    pub mint_address: String,
    // Base58-encoded address of the edition account row.
    pub edition_address: String,
    // Sequential edition number stored in the edition account data.
    pub edition_number: u64,
}

/// Response payload for the `getNftEditions` RPC method.
///
/// NOTE(review): these fields serialize in snake_case (no `rename_all`),
/// while the request type uses camelCase — confirm this asymmetry is
/// intended before stabilizing the API.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Default, JsonSchema)]
#[serde(default)]
pub struct NftEditions {
    // Number of editions included in this response.
    pub total: u32,
    // Effective page size used for the query.
    pub limit: u32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub page: Option<u32>,
    // Base58-encoded master edition account address.
    pub master_edition_address: String,
    // Current printed-edition supply from the master edition account.
    pub supply: u64,
    // Maximum printable editions; `None` presumably means unlimited — TODO confirm.
    pub max_supply: Option<u64>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub editions: Vec<NftEdition>,
}
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
1 change: 1 addition & 0 deletions integration_tests/tests/integration_tests/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@ mod cnft_tests;
mod common;
mod general_scenario_tests;
mod mpl_core_tests;
mod nft_editions_tests;
mod regular_nft_tests;
49 changes: 49 additions & 0 deletions integration_tests/tests/integration_tests/nft_editions_tests.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
use function_name::named;

use das_api::api::{self, ApiContract};

use itertools::Itertools;

use serial_test::serial;

use super::common::*;

/// Integration test: seed the accounts for one master edition and its
/// printed editions, then snapshot the `getNftEditions` RPC response.
#[tokio::test]
#[serial]
#[named]
async fn test_get_nft_editions() {
    let name = trim_test_name(function_name!());
    let setup = TestSetup::new_with_options(
        name.clone(),
        TestSetupOptions {
            network: Some(Network::Mainnet),
        },
    )
    .await;

    // Seed accounts for the mint queried below plus its related edition
    // accounts. Bug fix: the entry "9ZmY7qCa…" was listed twice; the
    // duplicate has been removed.
    let seeds: Vec<SeedEvent> = seed_accounts([
        "Ey2Qb8kLctbchQsMnhZs5DjY32To2QtPuXNwWvk4NosL",
        "9ZmY7qCaq7WbrR7RZdHWCNS9FrFRPwRqU84wzWfmqLDz",
        "8SHfqzJYABeGfiG1apwiEYt6TvfGQiL1pdwEjvTKsyiZ",
        "GJvFDcBWf6aDncd1TBzx2ou1rgLFYaMBdbYLBa9oTAEw",
        "AoxgzXKEsJmUyF5pBb3djn9cJFA26zh2SQHvd9EYijZV",
        "9yQecKKYSHxez7fFjJkUvkz42TLmkoXzhyZxEf2pw8pz",
        "4V9QuYLpiMu4ZQmhdEHmgATdgiHkDeJfvZi84BfkYcez",
        "giWoA4jqHFkodPJgtbRYRcYtiXbsVytnxnEao3QT2gg",
    ]);

    apply_migrations_and_delete_data(setup.db.clone()).await;
    index_seed_events(&setup, seeds.iter().collect_vec()).await;

    let request = r#"
    {
        "mintAddress": "Ey2Qb8kLctbchQsMnhZs5DjY32To2QtPuXNwWvk4NosL"
    }
    "#;

    let request: api::GetNftEditions = serde_json::from_str(request).unwrap();
    let response = setup.das_api.get_nft_editions(request).await.unwrap();

    insta::assert_json_snapshot!(name, response);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
---
source: integration_tests/tests/integration_tests/nft_editions_tests.rs
expression: response
snapshot_kind: text
---
{
"total": 2,
"limit": 10,
"master_edition_address": "8SHfqzJYABeGfiG1apwiEYt6TvfGQiL1pdwEjvTKsyiZ",
"supply": 60,
"max_supply": 69,
"editions": [
{
"mint_address": "GJvFDcBWf6aDncd1TBzx2ou1rgLFYaMBdbYLBa9oTAEw",
"edition_address": "AoxgzXKEsJmUyF5pBb3djn9cJFA26zh2SQHvd9EYijZV",
"edition_number": 1
},
{
"mint_address": "9yQecKKYSHxez7fFjJkUvkz42TLmkoXzhyZxEf2pw8pz",
"edition_address": "giWoA4jqHFkodPJgtbRYRcYtiXbsVytnxnEao3QT2gg",
"edition_number": 2
}
]
}
Loading