fix(pathfinder/config): rename Bloom filter load limit argument
kkovaacs committed Jan 23, 2024
1 parent 122a424 commit 387663b
Showing 6 changed files with 20 additions and 13 deletions.
11 changes: 6 additions & 5 deletions crates/pathfinder/src/bin/pathfinder/config.rs
@@ -226,13 +226,13 @@ This should only be enabled for debugging purposes as it adds substantial proces
get_events_max_blocks_to_scan: std::num::NonZeroUsize,

#[arg(
long = "rpc.get-events-max-bloom-filters-to-load",
long = "rpc.get-events-max-uncached-bloom-filters-to-load",
long_help = "The number of Bloom filters to load for events when querying for events. \
This limit is used to prevent queries from taking too long.",
env = "PATHFINDER_RPC_GET_EVENTS_MAX_BLOOM_FILTERS_TO_LOAD",
env = "PATHFINDER_RPC_GET_EVENTS_MAX_UNCACHED_BLOOM_FILTERS_TO_LOAD",
default_value = "100000"
)]
-get_events_max_bloom_filters_to_load: std::num::NonZeroUsize,
+get_events_max_uncached_bloom_filters_to_load: std::num::NonZeroUsize,
}

#[derive(clap::ValueEnum, Debug, Clone, Copy, PartialEq)]
@@ -491,7 +491,7 @@ pub struct Config {
pub gateway_api_key: Option<String>,
pub event_bloom_filter_cache_size: NonZeroUsize,
pub get_events_max_blocks_to_scan: NonZeroUsize,
-pub get_events_max_bloom_filters_to_load: NonZeroUsize,
+pub get_events_max_uncached_bloom_filters_to_load: NonZeroUsize,
}

pub struct Ethereum {
@@ -664,7 +664,8 @@ impl Config {
gateway_api_key: cli.gateway_api_key,
event_bloom_filter_cache_size: cli.event_bloom_filter_cache_size,
get_events_max_blocks_to_scan: cli.get_events_max_blocks_to_scan,
-get_events_max_bloom_filters_to_load: cli.get_events_max_bloom_filters_to_load,
+get_events_max_uncached_bloom_filters_to_load: cli
+    .get_events_max_uncached_bloom_filters_to_load,
}
}
}
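For readers tracking the rename itself, here is a minimal, self-contained sketch of what the renamed option amounts to after the config.rs changes above: the same NonZeroUsize type and 100000 default, with the new flag and environment variable names. This is an illustration only, not pathfinder's actual CLI struct, and it assumes clap 4 with the derive and env features enabled.

use clap::Parser;
use std::num::NonZeroUsize;

// Illustration only: a stripped-down CLI exposing just the renamed option,
// reusing the flag name, env var and default value from the hunks above.
#[derive(Parser, Debug)]
struct Cli {
    #[arg(
        long = "rpc.get-events-max-uncached-bloom-filters-to-load",
        env = "PATHFINDER_RPC_GET_EVENTS_MAX_UNCACHED_BLOOM_FILTERS_TO_LOAD",
        default_value = "100000"
    )]
    get_events_max_uncached_bloom_filters_to_load: NonZeroUsize,
}

fn main() {
    let cli = Cli::parse();
    println!("limit = {}", cli.get_events_max_uncached_bloom_filters_to_load);
}

Judging from these hunks alone, the old --rpc.get-events-max-bloom-filters-to-load flag and PATHFINDER_RPC_GET_EVENTS_MAX_BLOOM_FILTERS_TO_LOAD variable are not kept as aliases, so deployments that set them would need to switch to the new spellings.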
3 changes: 2 additions & 1 deletion crates/pathfinder/src/bin/pathfinder/main.rs
@@ -173,7 +173,8 @@ Hint: This is usually caused by exceeding the file descriptor limit of your syst
let rpc_config = pathfinder_rpc::context::RpcConfig {
batch_concurrency_limit: config.rpc_batch_concurrency_limit,
get_events_max_blocks_to_scan: config.get_events_max_blocks_to_scan,
-get_events_max_bloom_filters_to_load: config.get_events_max_bloom_filters_to_load,
+get_events_max_uncached_bloom_filters_to_load: config
+    .get_events_max_uncached_bloom_filters_to_load,
};

let context = pathfinder_rpc::context::RpcContext::new(
4 changes: 2 additions & 2 deletions crates/rpc/src/context.rs
@@ -15,7 +15,7 @@ use tokio::sync::watch as tokio_watch;
pub struct RpcConfig {
pub batch_concurrency_limit: NonZeroUsize,
pub get_events_max_blocks_to_scan: NonZeroUsize,
-pub get_events_max_bloom_filters_to_load: NonZeroUsize,
+pub get_events_max_uncached_bloom_filters_to_load: NonZeroUsize,
}

#[derive(Clone)]
@@ -83,7 +83,7 @@ impl RpcContext {
let config = RpcConfig {
batch_concurrency_limit: NonZeroUsize::new(8).unwrap(),
get_events_max_blocks_to_scan: NonZeroUsize::new(1000).unwrap(),
-get_events_max_bloom_filters_to_load: NonZeroUsize::new(1000).unwrap(),
+get_events_max_uncached_bloom_filters_to_load: NonZeroUsize::new(1000).unwrap(),
};

Self::new(
2 changes: 1 addition & 1 deletion crates/rpc/src/v03/method/get_events.rs
@@ -171,7 +171,7 @@ pub async fn get_events(
.events(
&filter,
context.config.get_events_max_blocks_to_scan,
-context.config.get_events_max_bloom_filters_to_load,
+context.config.get_events_max_uncached_bloom_filters_to_load,
)
.map_err(|e| match e {
EventFilterError::PageSizeTooBig(_) => GetEventsError::PageSizeTooBig,
9 changes: 7 additions & 2 deletions crates/storage/src/connection.rs
@@ -239,9 +239,14 @@ impl<'inner> Transaction<'inner> {
&self,
filter: &EventFilter,
max_blocks_to_scan: NonZeroUsize,
-max_bloom_filters_to_load: NonZeroUsize,
+max_uncached_bloom_filters_to_load: NonZeroUsize,
) -> Result<PageOfEvents, EventFilterError> {
-event::get_events(self, filter, max_blocks_to_scan, max_bloom_filters_to_load)
+event::get_events(
+    self,
+    filter,
+    max_blocks_to_scan,
+    max_uncached_bloom_filters_to_load,
+)
}

pub fn insert_sierra_class(
4 changes: 2 additions & 2 deletions crates/storage/src/connection/event.rs
@@ -93,7 +93,7 @@ pub(super) fn get_events(
tx: &Transaction<'_>,
filter: &EventFilter,
max_blocks_to_scan: NonZeroUsize,
-max_bloom_filters_to_load: NonZeroUsize,
+max_uncached_bloom_filters_to_load: NonZeroUsize,
) -> Result<PageOfEvents, EventFilterError> {
if filter.page_size > PAGE_SIZE_LIMIT {
return Err(EventFilterError::PageSizeTooBig(PAGE_SIZE_LIMIT));
@@ -179,7 +179,7 @@ pub(super) fn get_events(
block_number += 1;

// Check if we've reached our Bloom filter load limit
-if bloom_filters_loaded >= max_bloom_filters_to_load.get() {
+if bloom_filters_loaded >= max_uncached_bloom_filters_to_load.get() {
tracing::trace!("Bloom filter limit reached");
break ScanResult::ContinueFrom(block_number);
}
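To make the behaviour behind the rename concrete: the event.rs hunk above caps how many Bloom filters a single getEvents query may pull in before the scan is cut short and a continuation point is returned. A simplified sketch of that pattern follows; the function, the cache check and the signature are hypothetical stand-ins rather than pathfinder's actual event::get_events, and the idea that only cache misses count against the limit is inferred from the new "uncached" name.

use std::num::NonZeroUsize;

// Hypothetical, simplified scan loop modelled on the event.rs hunk above.
enum ScanResult {
    Done,
    ContinueFrom(u64),
}

fn scan_for_events(
    mut block_number: u64,
    last_block: u64,
    max_uncached_bloom_filters_to_load: NonZeroUsize,
    bloom_filter_is_cached: impl Fn(u64) -> bool,
) -> ScanResult {
    let mut bloom_filters_loaded: usize = 0;
    loop {
        if block_number > last_block {
            break ScanResult::Done;
        }
        // Loading a filter that is not already cached is the expensive part,
        // so only those loads are counted against the limit (assumption based
        // on the renamed argument).
        if !bloom_filter_is_cached(block_number) {
            bloom_filters_loaded += 1;
        }
        block_number += 1;
        // Check if we've reached our Bloom filter load limit.
        if bloom_filters_loaded >= max_uncached_bloom_filters_to_load.get() {
            break ScanResult::ContinueFrom(block_number);
        }
    }
}

With the default of 100000 from config.rs, a single query can therefore touch at most that many uncached block filters before reporting the block to continue from.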
