Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

No-op metadata JSON requests based on slot #171

Open
wants to merge 8 commits into
base: grpc-ingest
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Api.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
FROM das-api/builder AS files
FROM rust:1.75-slim-bullseye
FROM rust:1.79-slim-bullseye
ARG APP=/usr/src/app
RUN apt update \
&& apt install -y curl ca-certificates tzdata \
Expand Down
4 changes: 2 additions & 2 deletions Builder.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM rust:1.75-bullseye AS builder
FROM rust:1.79-bullseye AS builder
RUN apt-get update -y && \
apt-get install -y build-essential make git

Expand All @@ -21,6 +21,6 @@ WORKDIR /rust
RUN --mount=type=cache,target=/rust/target,id=das-rust \
cargo build --release --bins && cp `find /rust/target/release -maxdepth 1 -type f | sed 's/^\.\///' | grep -v "\." ` /rust/bins

FROM rust:1.75-slim-bullseye as final
FROM rust:1.79-slim-bullseye as final
COPY --from=builder /rust/bins /das/
CMD echo "Built the DAS API bins!"
2 changes: 1 addition & 1 deletion Ingest.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
FROM das-api/builder AS files
FROM rust:1.75-slim-bullseye
FROM rust:1.79-slim-bullseye
ARG APP=/usr/src/app
RUN apt update \
&& apt install -y curl ca-certificates tzdata \
Expand Down
2 changes: 1 addition & 1 deletion Load.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
FROM das-api/builder AS files
FROM rust:1.75-slim-bullseye
FROM rust:1.79-slim-bullseye
ARG APP=/usr/src/app
RUN apt update \
&& apt install -y curl ca-certificates tzdata \
Expand Down
2 changes: 1 addition & 1 deletion Migrator.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
FROM das-api/builder AS files

FROM rust:1.75-bullseye
FROM rust:1.79-bullseye
COPY init.sql /init.sql
ENV INIT_FILE_PATH=/init.sql
COPY --from=files /das/migration /bins/migration
Expand Down
2 changes: 1 addition & 1 deletion Proxy.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM rust:1.75-bullseye AS builder
FROM rust:1.79-bullseye AS builder
RUN cargo install wasm-pack

RUN mkdir /rust
Expand Down
37 changes: 28 additions & 9 deletions core/src/metadata_json.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
use {
backon::{ExponentialBuilder, Retryable},
clap::Parser,
digital_asset_types::dao::asset_data,
digital_asset_types::dao::asset_data::{self},
futures::{future::BoxFuture, stream::FuturesUnordered, StreamExt},
indicatif::HumanDuration,
log::{debug, error},
reqwest::{Client, Url as ReqwestUrl},
sea_orm::{entity::*, SqlxPostgresConnector},
sea_orm::{entity::*, ConnectionTrait, SqlxPostgresConnector, TransactionTrait},
serde::{Deserialize, Serialize},
tokio::{
sync::mpsc::{error::SendError, unbounded_channel, UnboundedSender},
Expand Down Expand Up @@ -90,9 +90,8 @@ impl MetadataJsonDownloadWorkerArgs {
}

let pool = pool.clone();
let client = client.clone();

handlers.push(spawn_task(client, pool, download_metadata_info));
handlers.push(spawn_task(client.clone(), pool, download_metadata_info));
}

while handlers.next().await.is_some() {}
Expand Down Expand Up @@ -169,10 +168,10 @@ async fn fetch_metadata_json(
let response = client.get(url.clone()).send().await?;

match response.error_for_status() {
Ok(res) => res
Ok(res) => Ok(res
.json::<serde_json::Value>()
.await
.map_err(|source| FetchMetadataJsonError::Parse { source, url }),
.map_err(|source| FetchMetadataJsonError::Parse { source, url })?),
Err(source) => {
let status = source
.status()
Expand Down Expand Up @@ -206,6 +205,7 @@ pub async fn perform_metadata_json_task(
pool: sqlx::PgPool,
download_metadata_info: &DownloadMetadataInfo,
) -> Result<asset_data::Model, MetadataJsonTaskError> {
let conn = SqlxPostgresConnector::from_sqlx_postgres_pool(pool);
match fetch_metadata_json(client, &download_metadata_info.uri).await {
Ok(metadata) => {
let active_model = asset_data::ActiveModel {
Expand All @@ -215,13 +215,21 @@ pub async fn perform_metadata_json_task(
..Default::default()
};

let conn = SqlxPostgresConnector::from_sqlx_postgres_pool(pool);

let model = active_model.update(&conn).await?;

Ok(model)
}
Err(e) => Err(MetadataJsonTaskError::Fetch(e)),
Err(e) => {
let active_model = asset_data::ActiveModel {
id: Set(download_metadata_info.asset_data_id.clone()),
reindex: Set(Some(true)),
..Default::default()
};

active_model.update(&conn).await?;

Err(MetadataJsonTaskError::Fetch(e))
}
}
}

Expand All @@ -248,3 +256,14 @@ impl DownloadMetadata {
.map(|_| ())
}
}

/// Reports whether the metadata JSON download for an asset can be skipped.
///
/// Looks up the `asset_data` row by id and returns `true` only when a row
/// exists and its stored `metadata_url` equals `uri` — i.e. the indexer has
/// already seen this exact URI for the asset.
///
/// Any database error is treated the same as "row not found" (`false`), so a
/// failed lookup falls back to performing the download rather than skipping it.
pub async fn skip_metadata_json_download<T>(asset_data_id: &[u8], uri: &str, conn: &T) -> bool
where
    T: ConnectionTrait + TransactionTrait,
{
    let lookup = asset_data::Entity::find_by_id(asset_data_id.to_vec())
        .one(conn)
        .await;

    match lookup {
        Ok(Some(model)) => model.metadata_url == uri,
        // No row, or a query error: do not skip the download.
        _ => false,
    }
}
2 changes: 1 addition & 1 deletion docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ services:
volumes:
- ./db-data/:/var/lib/postgresql/data/:rw
solana:
image: ghcr.io/metaplex-foundation/plerkle-test-validator:v1.9.0-1.75.0-v1.18.11
image: ghcr.io/metaplex-foundation/plerkle-test-validator:v1.9.0-1.79.0-v1.18.11
volumes:
- ./programs:/so/:ro
- ./ledger:/config:rw
Expand Down
64 changes: 37 additions & 27 deletions program_transformers/src/bubblegum/db.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use {
crate::error::{ProgramTransformerError, ProgramTransformerResult},
das_core::DownloadMetadataInfo,
das_core::{skip_metadata_json_download, DownloadMetadataInfo},
digital_asset_types::dao::{
asset, asset_authority, asset_creators, asset_data, asset_grouping, cl_audits_v2, cl_items,
sea_orm_active_enums::{
Expand Down Expand Up @@ -378,35 +378,45 @@ pub async fn upsert_asset_data<T>(
where
T: ConnectionTrait + TransactionTrait,
{
let model = asset_data::ActiveModel {
let skip_metadata_json_download = skip_metadata_json_download(&id, &metadata_url, txn).await;

let mut model = asset_data::ActiveModel {
id: ActiveValue::Set(id.clone()),
chain_data_mutability: ActiveValue::Set(chain_data_mutability),
chain_data: ActiveValue::Set(chain_data),
metadata_url: ActiveValue::Set(metadata_url.clone()),
metadata_mutability: ActiveValue::Set(metadata_mutability),
metadata: ActiveValue::Set(JsonValue::String("processing".to_string())),
slot_updated: ActiveValue::Set(slot_updated),
reindex: ActiveValue::Set(Some(true)),
raw_name: ActiveValue::Set(Some(raw_name)),
raw_symbol: ActiveValue::Set(Some(raw_symbol)),
base_info_seq: ActiveValue::Set(Some(seq)),
..Default::default()
};

let mut columns_to_update = vec![
asset_data::Column::ChainDataMutability,
asset_data::Column::ChainData,
asset_data::Column::MetadataMutability,
asset_data::Column::SlotUpdated,
asset_data::Column::RawName,
asset_data::Column::RawSymbol,
asset_data::Column::BaseInfoSeq,
];
if !skip_metadata_json_download {
model.metadata_url = ActiveValue::Set(metadata_url.clone());
model.metadata = ActiveValue::Set(JsonValue::String("processing".to_string()));
model.reindex = ActiveValue::Set(Some(true));

columns_to_update.extend_from_slice(&[
asset_data::Column::MetadataUrl,
asset_data::Column::Metadata,
asset_data::Column::Reindex,
]);
}

let mut query = asset_data::Entity::insert(model)
.on_conflict(
OnConflict::columns([asset_data::Column::Id])
.update_columns([
asset_data::Column::ChainDataMutability,
asset_data::Column::ChainData,
asset_data::Column::MetadataUrl,
asset_data::Column::MetadataMutability,
asset_data::Column::Metadata,
asset_data::Column::SlotUpdated,
asset_data::Column::Reindex,
asset_data::Column::RawName,
asset_data::Column::RawSymbol,
asset_data::Column::BaseInfoSeq,
])
.update_columns(columns_to_update)
.to_owned(),
)
.build(DbBackend::Postgres);
Expand All @@ -420,20 +430,20 @@ where
query.sql
);

let result = txn
.execute(query)
txn.execute(query)
.await
.map_err(|db_err| ProgramTransformerError::StorageWriteError(db_err.to_string()))?;

if result.rows_affected() > 0 {
Ok(Some(DownloadMetadataInfo::new(
id,
metadata_url,
slot_updated,
)))
} else {
Ok(None)
// If the metadata JSON already exists, skip the download.
if skip_metadata_json_download {
return Ok(None);
}

Ok(Some(DownloadMetadataInfo::new(
id,
metadata_url,
slot_updated,
)))
}

#[allow(clippy::too_many_arguments)]
Expand Down
51 changes: 34 additions & 17 deletions program_transformers/src/mpl_core_program/v1_asset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use {
mpl_core::types::{Plugin, PluginAuthority, PluginType, UpdateAuthority},
programs::mpl_core_program::MplCoreAccountData,
},
das_core::skip_metadata_json_download,
digital_asset_types::{
dao::{
asset, asset_authority, asset_creators, asset_data, asset_grouping,
Expand All @@ -26,8 +27,7 @@ use {
entity::{ActiveValue, ColumnTrait, EntityTrait},
prelude::*,
query::{JsonValue, QueryFilter, QueryTrait},
sea_query::query::OnConflict,
sea_query::Expr,
sea_query::{query::OnConflict, Expr},
ConnectionTrait, CursorTrait, DbBackend, Statement, TransactionTrait,
},
serde_json::{value::Value, Map},
Expand Down Expand Up @@ -183,34 +183,46 @@ pub async fn save_v1_asset<T: ConnectionTrait + TransactionTrait>(
_ => ChainMutability::Mutable,
};

let asset_data_model = asset_data::ActiveModel {
let skip_metadata_json_download = skip_metadata_json_download(&id_vec, &uri, &txn).await;

let mut asset_data_model = asset_data::ActiveModel {
chain_data_mutability: ActiveValue::Set(chain_mutability),
chain_data: ActiveValue::Set(chain_data_json),
metadata_url: ActiveValue::Set(uri.clone()),
metadata: ActiveValue::Set(JsonValue::String("processing".to_string())),
metadata_mutability: ActiveValue::Set(Mutability::Mutable),
slot_updated: ActiveValue::Set(slot_i),
reindex: ActiveValue::Set(Some(true)),
id: ActiveValue::Set(id_vec.clone()),
raw_name: ActiveValue::Set(Some(name.to_vec())),
raw_symbol: ActiveValue::Set(None),
base_info_seq: ActiveValue::Set(Some(0)),
..Default::default()
};

let mut columns_to_update = vec![
asset_data::Column::ChainDataMutability,
asset_data::Column::ChainData,
asset_data::Column::MetadataMutability,
asset_data::Column::SlotUpdated,
asset_data::Column::RawName,
asset_data::Column::RawSymbol,
asset_data::Column::BaseInfoSeq,
];

if !skip_metadata_json_download {
asset_data_model.metadata_url = ActiveValue::Set(uri.clone());
asset_data_model.metadata = ActiveValue::Set(JsonValue::String("processing".to_string()));
asset_data_model.reindex = ActiveValue::Set(Some(true));

columns_to_update.extend_from_slice(&[
asset_data::Column::MetadataUrl,
asset_data::Column::Metadata,
asset_data::Column::Reindex,
]);
}

let mut query = asset_data::Entity::insert(asset_data_model)
.on_conflict(
OnConflict::columns([asset_data::Column::Id])
.update_columns([
asset_data::Column::ChainDataMutability,
asset_data::Column::ChainData,
asset_data::Column::MetadataUrl,
asset_data::Column::MetadataMutability,
asset_data::Column::SlotUpdated,
asset_data::Column::Reindex,
asset_data::Column::RawName,
asset_data::Column::RawSymbol,
asset_data::Column::BaseInfoSeq,
])
.update_columns(columns_to_update)
.to_owned(),
)
.build(DbBackend::Postgres);
Expand Down Expand Up @@ -470,6 +482,11 @@ pub async fn save_v1_asset<T: ConnectionTrait + TransactionTrait>(
return Ok(None);
}

// If the metadata JSON exists, skip downloading it.
if skip_metadata_json_download {
return Ok(None);
}

// Otherwise return with info for background downloading.
Ok(Some(DownloadMetadataInfo::new(id_vec.clone(), uri, slot_i)))
}
Expand Down
Loading
Loading