diff --git a/ballista/rust/core/src/execution_plans/distributed_query.rs b/ballista/rust/core/src/execution_plans/distributed_query.rs
index d6b3c3da96a0..d53df94aebc9 100644
--- a/ballista/rust/core/src/execution_plans/distributed_query.rs
+++ b/ballista/rust/core/src/execution_plans/distributed_query.rs
@@ -289,7 +289,8 @@ async fn fetch_partition(
         BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
             .await
             .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
-    Ok(ballista_client
+
+    ballista_client
         .fetch_partition(
             &partition_id.job_id,
             partition_id.stage_id as usize,
@@ -297,5 +298,5 @@ async fn fetch_partition(
             &location.path,
         )
         .await
-        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
 }
diff --git a/ballista/rust/core/src/execution_plans/shuffle_reader.rs b/ballista/rust/core/src/execution_plans/shuffle_reader.rs
index 7482c1843ed3..73e5aebca7e0 100644
--- a/ballista/rust/core/src/execution_plans/shuffle_reader.rs
+++ b/ballista/rust/core/src/execution_plans/shuffle_reader.rs
@@ -212,7 +212,7 @@ async fn fetch_partition(
         BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
             .await
             .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
-    Ok(ballista_client
+    ballista_client
         .fetch_partition(
             &partition_id.job_id,
             partition_id.stage_id as usize,
@@ -220,7 +220,7 @@ async fn fetch_partition(
             &location.path,
         )
         .await
-        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
 }
 
 #[cfg(test)]
diff --git a/ballista/rust/scheduler/src/lib.rs b/ballista/rust/scheduler/src/lib.rs
index 3459cce9a4bf..92de88ae728d 100644
--- a/ballista/rust/scheduler/src/lib.rs
+++ b/ballista/rust/scheduler/src/lib.rs
@@ -232,7 +232,7 @@ impl SchedulerServer
-        available_executors: &mut Vec<ExecutorData>,
+        available_executors: &mut [ExecutorData],
         job_id: &str,
     ) -> Result<(Vec<Vec<TaskDefinition>>, usize), BallistaError> {
         let mut ret: Vec<Vec<TaskDefinition>> =
diff --git a/ballista/rust/scheduler/src/main.rs b/ballista/rust/scheduler/src/main.rs
index c9066027e387..cb37627201ab 100644
--- a/ballista/rust/scheduler/src/main.rs
+++ b/ballista/rust/scheduler/src/main.rs
@@ -106,7 +106,7 @@ async fn start_server(
         ),
     };
 
-    Ok(Server::bind(&addr)
+    Server::bind(&addr)
         .serve(make_service_fn(move |request: &AddrStream| {
             let scheduler_grpc_server =
                 SchedulerGrpcServer::new(scheduler_server.clone());
@@ -145,7 +145,7 @@ async fn start_server(
             ))
         }))
         .await
-        .context("Could not start grpc server")?)
+ .context("Could not start grpc server") } #[tokio::main] diff --git a/datafusion/src/datasource/file_format/parquet.rs b/datafusion/src/datasource/file_format/parquet.rs index 4afb2f54c3ab..d1d26e2c6d42 100644 --- a/datafusion/src/datasource/file_format/parquet.rs +++ b/datafusion/src/datasource/file_format/parquet.rs @@ -122,8 +122,8 @@ impl FileFormat for ParquetFormat { } fn summarize_min_max( - max_values: &mut Vec>, - min_values: &mut Vec>, + max_values: &mut [Option], + min_values: &mut [Option], fields: &[Field], i: usize, stat: &ParquetStatistics, diff --git a/datafusion/src/datasource/mod.rs b/datafusion/src/datasource/mod.rs index 33512b40cef7..9a7b17d1a867 100644 --- a/datafusion/src/datasource/mod.rs +++ b/datafusion/src/datasource/mod.rs @@ -177,8 +177,8 @@ fn create_max_min_accs( fn get_col_stats( schema: &Schema, null_counts: Vec, - max_values: &mut Vec>, - min_values: &mut Vec>, + max_values: &mut [Option], + min_values: &mut [Option], ) -> Vec { (0..schema.fields().len()) .map(|i| { diff --git a/datafusion/src/logical_plan/extension.rs b/datafusion/src/logical_plan/extension.rs index 43bf96ffb072..ee19ad43ecfb 100644 --- a/datafusion/src/logical_plan/extension.rs +++ b/datafusion/src/logical_plan/extension.rs @@ -71,6 +71,7 @@ pub trait UserDefinedLogicalNode: fmt::Debug { /// of self.inputs and self.exprs. /// /// So, `self.from_template(exprs, ..).expressions() == exprs + #[allow(clippy::wrong_self_convention)] fn from_template( &self, exprs: &[Expr], diff --git a/datafusion/src/physical_plan/hash_utils.rs b/datafusion/src/physical_plan/hash_utils.rs index 27a5376cf749..00073a6592ce 100644 --- a/datafusion/src/physical_plan/hash_utils.rs +++ b/datafusion/src/physical_plan/hash_utils.rs @@ -42,7 +42,7 @@ fn combine_hashes(l: u64, r: u64) -> u64 { fn hash_decimal128<'a>( array: &ArrayRef, random_state: &RandomState, - hashes_buffer: &'a mut Vec, + hashes_buffer: &'a mut [u64], mul_col: bool, ) { let array = array.as_any().downcast_ref::().unwrap(); @@ -207,7 +207,7 @@ macro_rules! hash_array_float { fn create_hashes_dictionary( array: &ArrayRef, random_state: &RandomState, - hashes_buffer: &mut Vec, + hashes_buffer: &mut [u64], multi_col: bool, ) -> Result<()> { let dict_array = array.as_any().downcast_ref::>().unwrap(); diff --git a/datafusion/src/physical_plan/tdigest/mod.rs b/datafusion/src/physical_plan/tdigest/mod.rs index 5bd8b9e35fb0..4268bcf03960 100644 --- a/datafusion/src/physical_plan/tdigest/mod.rs +++ b/datafusion/src/physical_plan/tdigest/mod.rs @@ -359,7 +359,7 @@ impl TDigest { } fn external_merge( - centroids: &mut Vec, + centroids: &mut [Centroid], first: usize, middle: usize, last: usize, @@ -582,23 +582,23 @@ impl TDigest { /// ┌────────┬────────┬────────┬───────┬────────┬────────┐ /// │max_size│ sum │ count │ max │ min │centroid│ /// └────────┴────────┴────────┴───────┴────────┴────────┘ - /// │ - /// ┌─────────────────────┘ - /// ▼ - /// ┌ List ───┐ - /// │┌ ─ ─ ─ ┐│ - /// │ mean │ - /// │├ ─ ─ ─ ┼│─ ─ Centroid 1 - /// │ weight │ - /// │└ ─ ─ ─ ┘│ - /// │ │ - /// │┌ ─ ─ ─ ┐│ - /// │ mean │ - /// │├ ─ ─ ─ ┼│─ ─ Centroid 2 - /// │ weight │ - /// │└ ─ ─ ─ ┘│ - /// │ │ - /// ... + /// │ + /// ┌─────────────────────┘ + /// ▼ + /// ┌ List ───┐ + /// │┌ ─ ─ ─ ┐│ + /// │ mean │ + /// │├ ─ ─ ─ ┼│─ ─ Centroid 1 + /// │ weight │ + /// │└ ─ ─ ─ ┘│ + /// │ │ + /// │┌ ─ ─ ─ ┐│ + /// │ mean │ + /// │├ ─ ─ ─ ┼│─ ─ Centroid 2 + /// │ weight │ + /// │└ ─ ─ ─ ┘│ + /// │ │ + /// ... /// /// ``` ///