Fix clippy on 7.x branch (#2191)
alamb authored Apr 10, 2022
1 parent 5aa689f commit b1ef00b
Showing 9 changed files with 33 additions and 31 deletions.
5 changes: 3 additions & 2 deletions ballista/rust/core/src/execution_plans/distributed_query.rs
@@ -289,13 +289,14 @@ async fn fetch_partition(
         BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
             .await
             .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
-    Ok(ballista_client
+
+    ballista_client
         .fetch_partition(
             &partition_id.job_id,
             partition_id.stage_id as usize,
             partition_id.partition_id as usize,
             &location.path,
         )
         .await
-        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
 }
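
The change above, and the matching one in shuffle_reader.rs below, removes an `Ok(..?)` wrapper around an expression that already has the function's return type — the shape clippy's `needless_question_mark` lint flags. A minimal sketch of the before/after pattern; `fetch` is a hypothetical stand-in for the Ballista client call, not project code:

    // Hypothetical helper standing in for ballista_client.fetch_partition(..).
    fn fetch() -> Result<u32, String> {
        Ok(42)
    }

    // Before: `?` unwraps the Result only for `Ok(..)` to rewrap it.
    fn fetch_wrapped() -> Result<u32, String> {
        Ok(fetch()?)
    }

    // After: return the tail Result directly, as the commit does.
    fn fetch_direct() -> Result<u32, String> {
        fetch()
    }

    fn main() {
        assert_eq!(fetch_direct(), Ok(42));
        assert_eq!(fetch_wrapped(), fetch_direct());
    }
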
4 changes: 2 additions & 2 deletions ballista/rust/core/src/execution_plans/shuffle_reader.rs
@@ -212,15 +212,15 @@ async fn fetch_partition(
         BallistaClient::try_new(metadata.host.as_str(), metadata.port as u16)
             .await
             .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
-    Ok(ballista_client
+    ballista_client
         .fetch_partition(
             &partition_id.job_id,
             partition_id.stage_id as usize,
             partition_id.partition_id as usize,
             &location.path,
         )
         .await
-        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?)
+        .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))
 }
 
 #[cfg(test)]
2 changes: 1 addition & 1 deletion ballista/rust/scheduler/src/lib.rs
@@ -232,7 +232,7 @@ impl<T: 'static + AsLogicalPlan, U: 'static + AsExecutionPlan> SchedulerServer<T
 
     async fn fetch_tasks(
         &self,
-        available_executors: &mut Vec<ExecutorData>,
+        available_executors: &mut [ExecutorData],
         job_id: &str,
     ) -> Result<(Vec<Vec<TaskDefinition>>, usize), BallistaError> {
         let mut ret: Vec<Vec<TaskDefinition>> =
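
Replacing `&mut Vec<ExecutorData>` with `&mut [ExecutorData]` (and the similar signature changes in the files below) follows clippy's `ptr_arg` lint: when a function never grows or shrinks the vector, a mutable slice is the more general parameter type, and `&mut Vec<T>` coerces to `&mut [T]`, so call sites are unaffected. A small illustrative sketch under that assumption; `scale_all` is an invented function, not scheduler code:

    // Accept a mutable slice: the function only mutates elements in place.
    fn scale_all(values: &mut [u64], factor: u64) {
        for v in values.iter_mut() {
            *v *= factor;
        }
    }

    fn main() {
        let mut buf = vec![1u64, 2, 3];
        // &mut Vec<u64> coerces to &mut [u64]; the call site stays the same.
        scale_all(&mut buf, 10);
        assert_eq!(buf, vec![10, 20, 30]);
    }
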
4 changes: 2 additions & 2 deletions ballista/rust/scheduler/src/main.rs
@@ -106,7 +106,7 @@ async fn start_server(
             ),
         };
 
-    Ok(Server::bind(&addr)
+    Server::bind(&addr)
         .serve(make_service_fn(move |request: &AddrStream| {
             let scheduler_grpc_server =
                 SchedulerGrpcServer::new(scheduler_server.clone());
@@ -145,7 +145,7 @@ async fn start_server(
             ))
         }))
         .await
-        .context("Could not start grpc server")?)
+        .context("Could not start grpc server")
 }
 
 #[tokio::main]
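
The `start_server` edit is the same `Ok(..?)` cleanup, applied to a chain that already ends in `.context(..)`. A rough sketch assuming an anyhow-style `Context` extension trait; the `bind_placeholder` helper and address are invented for illustration:

    use anyhow::{Context, Result};

    // Invented stand-in for Server::bind(..).serve(..).await.
    fn bind_placeholder(addr: &str) -> std::io::Result<()> {
        println!("binding {addr}");
        Ok(())
    }

    // Before: Ok(bind_placeholder(addr).context("Could not start grpc server")?)
    // After: the `.context(..)` expression already has the function's Result type.
    fn start_server(addr: &str) -> Result<()> {
        bind_placeholder(addr).context("Could not start grpc server")
    }

    fn main() -> Result<()> {
        start_server("127.0.0.1:0")
    }
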
4 changes: 2 additions & 2 deletions datafusion/src/datasource/file_format/parquet.rs
@@ -122,8 +122,8 @@ impl FileFormat for ParquetFormat {
 }
 
 fn summarize_min_max(
-    max_values: &mut Vec<Option<MaxAccumulator>>,
-    min_values: &mut Vec<Option<MinAccumulator>>,
+    max_values: &mut [Option<MaxAccumulator>],
+    min_values: &mut [Option<MinAccumulator>],
     fields: &[Field],
     i: usize,
     stat: &ParquetStatistics,
4 changes: 2 additions & 2 deletions datafusion/src/datasource/mod.rs
@@ -177,8 +177,8 @@ fn create_max_min_accs(
 fn get_col_stats(
     schema: &Schema,
     null_counts: Vec<usize>,
-    max_values: &mut Vec<Option<MaxAccumulator>>,
-    min_values: &mut Vec<Option<MinAccumulator>>,
+    max_values: &mut [Option<MaxAccumulator>],
+    min_values: &mut [Option<MinAccumulator>],
 ) -> Vec<ColumnStatistics> {
     (0..schema.fields().len())
         .map(|i| {
1 change: 1 addition & 0 deletions datafusion/src/logical_plan/extension.rs
@@ -71,6 +71,7 @@ pub trait UserDefinedLogicalNode: fmt::Debug {
     /// of self.inputs and self.exprs.
     ///
     /// So, `self.from_template(exprs, ..).expressions() == exprs
+    #[allow(clippy::wrong_self_convention)]
     fn from_template(
         &self,
         exprs: &[Expr],
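
The added `#[allow(clippy::wrong_self_convention)]` silences a naming-convention lint: clippy expects methods named `from_*` to be constructors that take no `self` receiver, while `from_template` needs `&self` so it can be called through a trait object. A simplified sketch of that shape; the `NodeTemplate` trait and `Noop` type below are illustrative stand-ins, not the real `UserDefinedLogicalNode` API:

    use std::fmt::Debug;

    // Illustrative stand-in, not the real DataFusion trait.
    trait NodeTemplate: Debug {
        // `from_*` names normally mean "constructor without a receiver", which
        // is what clippy::wrong_self_convention checks; dynamic dispatch
        // requires the `&self` receiver here, so the lint is allowed instead.
        #[allow(clippy::wrong_self_convention)]
        fn from_template(&self, exprs: &[String]) -> Box<dyn NodeTemplate>;
    }

    #[derive(Debug)]
    struct Noop;

    impl NodeTemplate for Noop {
        fn from_template(&self, _exprs: &[String]) -> Box<dyn NodeTemplate> {
            Box::new(Noop)
        }
    }

    fn main() {
        let node: Box<dyn NodeTemplate> = Box::new(Noop);
        let _copy = node.from_template(&[]);
    }
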
4 changes: 2 additions & 2 deletions datafusion/src/physical_plan/hash_utils.rs
@@ -42,7 +42,7 @@ fn combine_hashes(l: u64, r: u64) -> u64 {
 fn hash_decimal128<'a>(
     array: &ArrayRef,
     random_state: &RandomState,
-    hashes_buffer: &'a mut Vec<u64>,
+    hashes_buffer: &'a mut [u64],
     mul_col: bool,
 ) {
     let array = array.as_any().downcast_ref::<DecimalArray>().unwrap();
@@ -207,7 +207,7 @@ macro_rules! hash_array_float {
 fn create_hashes_dictionary<K: ArrowDictionaryKeyType>(
     array: &ArrayRef,
     random_state: &RandomState,
-    hashes_buffer: &mut Vec<u64>,
+    hashes_buffer: &mut [u64],
     multi_col: bool,
 ) -> Result<()> {
     let dict_array = array.as_any().downcast_ref::<DictionaryArray<K>>().unwrap();
36 changes: 18 additions & 18 deletions datafusion/src/physical_plan/tdigest/mod.rs
@@ -359,7 +359,7 @@ impl TDigest {
     }
 
     fn external_merge(
-        centroids: &mut Vec<Centroid>,
+        centroids: &mut [Centroid],
         first: usize,
         middle: usize,
         last: usize,
@@ -582,23 +582,23 @@ impl TDigest {
 /// ┌────────┬────────┬────────┬───────┬────────┬────────┐
 /// │max_size│  sum   │ count  │  max  │  min   │centroid│
 /// └────────┴────────┴────────┴───────┴────────┴────────┘
-///                                                 │
-///                           ┌─────────────────────┘
-///                           ▼
-///                    ┌ List ───┐
-///                    │┌ ─ ─ ─ ┐│
-///                    │  mean   │
-///                    │├ ─ ─ ─ ┼│─ ─ Centroid 1
-///                    │  weight │
-///                    │└ ─ ─ ─ ┘│
-///                    │         │
-///                    │┌ ─ ─ ─ ┐│
-///                    │  mean   │
-///                    │├ ─ ─ ─ ┼│─ ─ Centroid 2
-///                    │  weight │
-///                    │└ ─ ─ ─ ┘│
-///                    │         │
-///                        ...
+///                                                 │
+///                           ┌─────────────────────┘
+///                           ▼
+///                    ┌ List ───┐
+///                    │┌ ─ ─ ─ ┐│
+///                    │  mean   │
+///                    │├ ─ ─ ─ ┼│─ ─ Centroid 1
+///                    │  weight │
+///                    │└ ─ ─ ─ ┘│
+///                    │         │
+///                    │┌ ─ ─ ─ ┐│
+///                    │  mean   │
+///                    │├ ─ ─ ─ ┼│─ ─ Centroid 2
+///                    │  weight │
+///                    │└ ─ ─ ─ ┘│
+///                    │         │
+///                        ...
 ///
 /// ```
 ///
