Commit 4344454
address clippies in root workspace
Igosuki committed Jan 12, 2022
1 parent 171332f commit 4344454
Showing 10 changed files with 423 additions and 416 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/rust.yml
@@ -116,7 +116,8 @@ jobs:
cargo test --no-default-features
cargo run --example csv_sql
cargo run --example parquet_sql
- # cargo run --example avro_sql --features=datafusion/avro
+ #nopass
+ cargo run --example avro_sql --features=datafusion/avro
env:
CARGO_HOME: "/github/home/.cargo"
CARGO_TARGET_DIR: "/github/home/target"
@@ -127,6 +128,7 @@ jobs:
export PARQUET_TEST_DATA=$(pwd)/parquet-testing/data
cd ballista/rust
# snmalloc requires cmake so build without default features
+ #nopass
cargo test --no-default-features --features sled
env:
CARGO_HOME: "/github/home/.cargo"
4 changes: 1 addition & 3 deletions ballista/rust/executor/src/executor.rs
@@ -78,9 +78,7 @@ impl Executor {
job_id,
stage_id,
part,
- DisplayableExecutionPlan::with_metrics(&exec)
-     .indent()
-     .to_string()
+ DisplayableExecutionPlan::with_metrics(&exec).indent()
);

Ok(partitions)
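The dropped .to_string() calls in this file (and in the planner.rs and tpch.rs hunks below) all address the same clippy complaint: calling .to_string() on a value that is only handed to println!/format!, where the formatting machinery invokes the Display impl anyway, so the extra String allocation is pointless. This is presumably the to_string_in_format_args lint, which clippy added shortly before this commit; the exact lint name is an assumption, and the sketch below uses an invented stand-in type rather than DataFusion's DisplayableExecutionPlan.

    use std::fmt;

    // Stand-in for a Display-able plan wrapper like the ones in the diff.
    struct IndentedPlan(&'static str);

    impl fmt::Display for IndentedPlan {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "  {}", self.0)
        }
    }

    fn main() {
        let plan = IndentedPlan("ProjectionExec");
        // Before: clippy flags the needless allocation, since println! would
        // format the Display value by itself.
        println!("{}", plan.to_string());
        // After: pass the Display value directly, as this commit does.
        println!("{}", plan);
    }

The .indent() wrappers in the diff evidently implement Display as well, which is why the trailing .to_string() calls can simply be removed.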
4 changes: 2 additions & 2 deletions ballista/rust/scheduler/src/planner.rs
@@ -293,7 +293,7 @@ mod test {
.plan_query_stages(&job_uuid.to_string(), plan)
.await?;
for stage in &stages {
- println!("{}", displayable(stage.as_ref()).indent().to_string());
+ println!("{}", displayable(stage.as_ref()).indent());
}

/* Expected result:
@@ -407,7 +407,7 @@ order by
.plan_query_stages(&job_uuid.to_string(), plan)
.await?;
for stage in &stages {
- println!("{}", displayable(stage.as_ref()).indent().to_string());
+ println!("{}", displayable(stage.as_ref()).indent());
}

/* Expected result:
6 changes: 2 additions & 4 deletions benchmarks/src/bin/tpch.rs
@@ -540,16 +540,14 @@ async fn execute_query(
if debug {
println!(
"=== Physical plan ===\n{}\n",
- displayable(physical_plan.as_ref()).indent().to_string()
+ displayable(physical_plan.as_ref()).indent()
);
}
let result = collect(physical_plan.clone()).await?;
if debug {
println!(
"=== Physical plan with metrics ===\n{}\n",
- DisplayableExecutionPlan::with_metrics(physical_plan.as_ref())
-     .indent()
-     .to_string()
+ DisplayableExecutionPlan::with_metrics(physical_plan.as_ref()).indent()
);
print::print(&result);
}
1 change: 1 addition & 0 deletions datafusion/src/physical_plan/expressions/rank.rs
@@ -38,6 +38,7 @@ pub struct Rank {
}

#[derive(Debug, Copy, Clone)]
+ #[allow(clippy::enum_variant_names)]
pub(crate) enum RankType {
Rank,
DenseRank,
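The attribute added here silences clippy's enum_variant_names lint, which fires when all of an enum's variants share a common prefix or suffix (by default only once the enum has three or more variants); the visible variants Rank and DenseRank both end in the word that also names the type. A reduced sketch of the pattern follows; the real RankType has more variants than the truncated hunk shows, and PercentRank below is a hypothetical stand-in added only so the lint would actually trigger without the allow.

    #[derive(Debug, Copy, Clone)]
    #[allow(clippy::enum_variant_names)] // variants all end in "Rank"
    #[allow(dead_code)]
    enum RankType {
        Rank,
        DenseRank,
        PercentRank, // hypothetical variant for illustration
    }

    fn main() {
        let rank_type = RankType::DenseRank;
        println!("{:?}", rank_type);
    }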
7 changes: 1 addition & 6 deletions datafusion/src/physical_plan/file_format/parquet.rs
@@ -341,12 +341,7 @@ macro_rules! get_min_max_values {
};

let data_type = field.data_type();
- let null_scalar: ScalarValue = if let Ok(v) = data_type.try_into() {
-     v
- } else {
-     // DataFusion doesn't have support for ScalarValues of the column type
-     return None
- };
+ let null_scalar: ScalarValue = data_type.try_into().ok()?;

let scalar_values : Vec<ScalarValue> = $self.row_group_metadata
.iter()
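The parquet.rs change collapses a manual if-let-else-return-None into try_into().ok()?: Result::ok turns the Result into an Option, and the question mark propagates None out of the surrounding function (the removed "return None" shows the code expanded from this macro already sits in an Option-returning context). A small sketch of the equivalence, using an assumed i64-to-u8 conversion since DataFusion's DataType-to-ScalarValue TryInto impl is not reproduced here; the function names are illustrative only.

    use std::convert::TryInto; // needed on editions before 2021; harmless otherwise

    // Before: the verbose shape the commit removes.
    fn narrow_verbose(raw: i64) -> Option<u8> {
        let v: u8 = if let Ok(v) = raw.try_into() {
            v
        } else {
            // no supported conversion for this value
            return None;
        };
        Some(v)
    }

    // After: the same logic written the way the commit writes it.
    fn narrow_concise(raw: i64) -> Option<u8> {
        let v: u8 = raw.try_into().ok()?;
        Some(v)
    }

    fn main() {
        assert_eq!(narrow_verbose(7), narrow_concise(7));
        assert_eq!(narrow_verbose(300), None);
        assert_eq!(narrow_concise(300), None);
        println!("both forms agree");
    }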