Sort Merge Join #4

Closed · wants to merge 29 commits · showing changes from all commits
4 changes: 2 additions & 2 deletions Cargo.toml
@@ -31,5 +31,5 @@ members = [
 exclude = ["python"]

 [patch.crates-io]
-arrow2 = { git = "https://github.com/jorgecarleitao/arrow2.git", rev = "b7e991366104d1647b955a828e0551256ef2e7c9" }
-arrow-flight = { git = "https://github.com/jorgecarleitao/arrow2.git", rev = "b7e991366104d1647b955a828e0551256ef2e7c9" }
+arrow2 = { path = "/Users/shenyijie/oss/arrow2" }
+arrow-flight = { path = "/Users/shenyijie/oss/arrow2/arrow-flight" }
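
(Note: a `[patch.crates-io]` section redirects every workspace dependency on the named crates, so this hunk swaps the pinned arrow2 git revision for a local checkout under /Users/shenyijie/oss/arrow2, a path that only resolves on the author's machine.)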
59 changes: 6 additions & 53 deletions ballista/rust/core/src/execution_plans/shuffle_writer.rs
@@ -42,6 +42,7 @@ use datafusion::arrow::io::ipc::read::FileReader;
 use datafusion::arrow::io::ipc::write::FileWriter;
 use datafusion::arrow::record_batch::RecordBatch;
 use datafusion::error::{DataFusionError, Result};
+use datafusion::physical_plan::common::IPCWriterWrapper;
 use datafusion::physical_plan::hash_utils::create_hashes;
 use datafusion::physical_plan::metrics::{
     self, ExecutionPlanMetricsSet, MetricBuilder, MetricsSet,
@@ -197,7 +198,7 @@ impl ShuffleWriterExec {

         // we won't necessary produce output for every possible partition, so we
         // create writers on demand
-        let mut writers: Vec<Option<ShuffleWriter>> = vec![];
+        let mut writers: Vec<Option<IPCWriterWrapper>> = vec![];
         for _ in 0..num_output_partitions {
             writers.push(None);
         }
@@ -267,8 +268,10 @@ impl ShuffleWriterExec {
             let path = path.to_str().unwrap();
             info!("Writing results to {}", path);

-            let mut writer =
-                ShuffleWriter::new(path, stream.schema().as_ref())?;
+            let mut writer = IPCWriterWrapper::new(
+                path,
+                stream.schema().as_ref(),
+            )?;

             writer.write(&output_batch)?;
             writers[output_partition] = Some(writer);
@@ -433,56 +436,6 @@ fn result_schema() -> SchemaRef {
     ]))
 }

-struct ShuffleWriter {
-    path: String,
-    writer: FileWriter<BufWriter<File>>,
-    num_batches: u64,
-    num_rows: u64,
-    num_bytes: u64,
-}
-
-impl ShuffleWriter {
-    fn new(path: &str, schema: &Schema) -> Result<Self> {
-        let file = File::create(path)
-            .map_err(|e| {
-                BallistaError::General(format!(
-                    "Failed to create partition file at {}: {:?}",
-                    path, e
-                ))
-            })
-            .map_err(|e| DataFusionError::Execution(format!("{:?}", e)))?;
-        let buffer_writer = std::io::BufWriter::new(file);
-        Ok(Self {
-            num_batches: 0,
-            num_rows: 0,
-            num_bytes: 0,
-            path: path.to_owned(),
-            writer: FileWriter::try_new(buffer_writer, schema)?,
-        })
-    }
-
-    fn write(&mut self, batch: &RecordBatch) -> Result<()> {
-        self.writer.write(batch)?;
-        self.num_batches += 1;
-        self.num_rows += batch.num_rows() as u64;
-        let num_bytes: usize = batch
-            .columns()
-            .iter()
-            .map(|array| estimated_bytes_size(array.as_ref()))
-            .sum();
-        self.num_bytes += num_bytes as u64;
-        Ok(())
-    }
-
-    fn finish(&mut self) -> Result<()> {
-        self.writer.finish().map_err(DataFusionError::ArrowError)
-    }
-
-    fn path(&self) -> &str {
-        &self.path
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
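
For reference, the deleted ShuffleWriter above is a counting wrapper around an Arrow IPC FileWriter: it forwards each RecordBatch and keeps running batch/row/byte totals, which is the surface the shared IPCWriterWrapper now provides. A minimal self-contained sketch of the same accounting pattern over any std::io::Write (CountingWriter and its methods are illustrative names, not the DataFusion API):

```rust
use std::fs::File;
use std::io::{BufWriter, Result, Write};

/// Illustrative stand-in for the counting-writer pattern used by the
/// removed ShuffleWriter: wrap a writer and keep running totals beside it.
struct CountingWriter<W: Write> {
    inner: W,
    num_writes: u64,
    num_bytes: u64,
}

impl<W: Write> CountingWriter<W> {
    fn new(inner: W) -> Self {
        Self { inner, num_writes: 0, num_bytes: 0 }
    }

    /// Forward a chunk to the inner writer and update the totals.
    fn write_chunk(&mut self, chunk: &[u8]) -> Result<()> {
        self.inner.write_all(chunk)?;
        self.num_writes += 1;
        self.num_bytes += chunk.len() as u64;
        Ok(())
    }

    /// Flush and hand back the accumulated statistics.
    fn finish(mut self) -> Result<(u64, u64)> {
        self.inner.flush()?;
        Ok((self.num_writes, self.num_bytes))
    }
}

fn main() -> Result<()> {
    let file = File::create("/tmp/example-partition.bin")?;
    let mut writer = CountingWriter::new(BufWriter::new(file));
    writer.write_chunk(b"hello ")?;
    writer.write_chunk(b"world")?;
    let (writes, bytes) = writer.finish()?;
    println!("{} writes, {} bytes", writes, bytes);
    Ok(())
}
```

Keeping the counters inside the writer updates the metrics on the same code path as the write itself, so the totals cannot drift from what was actually handed to the underlying file.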
30 changes: 16 additions & 14 deletions ballista/rust/core/src/serde/physical_plan/from_proto.rs
@@ -21,15 +21,8 @@ use std::collections::HashMap;
 use std::convert::{TryFrom, TryInto};
 use std::sync::Arc;

-use crate::error::BallistaError;
-use crate::execution_plans::{
-    ShuffleReaderExec, ShuffleWriterExec, UnresolvedShuffleExec,
-};
-use crate::serde::protobuf::repartition_exec_node::PartitionMethod;
-use crate::serde::protobuf::ShuffleReaderPartition;
-use crate::serde::scheduler::PartitionLocation;
-use crate::serde::{from_proto_binary_op, proto_error, protobuf};
-use crate::{convert_box_required, convert_required, into_required};
+use log::debug;

 use datafusion::arrow::datatypes::{DataType, Schema, SchemaRef};
 use datafusion::catalog::catalog::{
     CatalogList, CatalogProvider, MemoryCatalogList, MemoryCatalogProvider,
@@ -46,7 +39,8 @@ use datafusion::physical_plan::aggregates::{create_aggregate_expr, AggregateFunc
 use datafusion::physical_plan::avro::{AvroExec, AvroReadOptions};
 use datafusion::physical_plan::coalesce_partitions::CoalescePartitionsExec;
 use datafusion::physical_plan::hash_aggregate::{AggregateMode, HashAggregateExec};
-use datafusion::physical_plan::hash_join::PartitionMode;
+use datafusion::physical_plan::joins::cross_join::CrossJoinExec;
+use datafusion::physical_plan::joins::hash_join::{HashJoinExec, PartitionMode};
 use datafusion::physical_plan::metrics::ExecutionPlanMetricsSet;
 use datafusion::physical_plan::parquet::ParquetPartition;
 use datafusion::physical_plan::planner::DefaultPhysicalPlanner;
@@ -56,7 +50,6 @@ use datafusion::physical_plan::window_functions::{
 use datafusion::physical_plan::windows::{create_window_expr, WindowAggExec};
 use datafusion::physical_plan::{
     coalesce_batches::CoalesceBatchesExec,
-    cross_join::CrossJoinExec,
     csv::CsvExec,
     empty::EmptyExec,
     expressions::{
@@ -65,22 +58,31 @@ use datafusion::physical_plan::{
     },
     filter::FilterExec,
     functions::{self, BuiltinScalarFunction, ScalarFunctionExpr},
-    hash_join::HashJoinExec,
     limit::{GlobalLimitExec, LocalLimitExec},
     parquet::ParquetExec,
     projection::ProjectionExec,
     repartition::RepartitionExec,
-    sort::{SortExec, SortOptions},
+    sorts::sort::SortExec,
+    sorts::SortOptions,
     Partitioning,
 };
 use datafusion::physical_plan::{
     AggregateExpr, ExecutionPlan, PhysicalExpr, Statistics, WindowExpr,
 };
 use datafusion::prelude::CsvReadOptions;
-use log::debug;
 use protobuf::physical_expr_node::ExprType;
 use protobuf::physical_plan_node::PhysicalPlanType;

+use crate::error::BallistaError;
+use crate::execution_plans::{
+    ShuffleReaderExec, ShuffleWriterExec, UnresolvedShuffleExec,
+};
+use crate::serde::protobuf::repartition_exec_node::PartitionMethod;
+use crate::serde::protobuf::ShuffleReaderPartition;
+use crate::serde::scheduler::PartitionLocation;
+use crate::serde::{from_proto_binary_op, proto_error, protobuf};
+use crate::{convert_box_required, convert_required, into_required};

 impl TryInto<Arc<dyn ExecutionPlan>> for &protobuf::PhysicalPlanNode {
     type Error = BallistaError;
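
Most of the remaining churn in this file, and in the serde, utils, and planner files below, is one mechanical rename: the join and sort operators moved under the joins and sorts submodules of datafusion::physical_plan. Side by side, the old and new paths as they appear in this diff:

```rust
// Before this PR:
use datafusion::physical_plan::cross_join::CrossJoinExec;
use datafusion::physical_plan::hash_join::{HashJoinExec, PartitionMode};
use datafusion::physical_plan::sort::{SortExec, SortOptions};

// After this PR:
use datafusion::physical_plan::joins::cross_join::CrossJoinExec;
use datafusion::physical_plan::joins::hash_join::{HashJoinExec, PartitionMode};
use datafusion::physical_plan::sorts::sort::SortExec;
use datafusion::physical_plan::sorts::SortOptions;
```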
7 changes: 4 additions & 3 deletions ballista/rust/core/src/serde/physical_plan/mod.rs
@@ -22,6 +22,7 @@ pub mod to_proto;
 mod roundtrip_tests {
     use std::{convert::TryInto, sync::Arc};

+    use datafusion::physical_plan::joins::hash_join::{HashJoinExec, PartitionMode};
     use datafusion::{
         arrow::{
             compute::sort::SortOptions,
@@ -34,18 +35,18 @@
             expressions::{Avg, Column, PhysicalSortExpr},
             filter::FilterExec,
             hash_aggregate::{AggregateMode, HashAggregateExec},
-            hash_join::{HashJoinExec, PartitionMode},
             limit::{GlobalLimitExec, LocalLimitExec},
-            sort::SortExec,
+            sorts::sort::SortExec,
             AggregateExpr, ColumnarValue, Distribution, ExecutionPlan, Partitioning,
             PhysicalExpr,
         },
         scalar::ScalarValue,
     };

-    use crate::execution_plans::ShuffleWriterExec;

     use super::super::super::error::Result;
     use super::super::protobuf;
+    use crate::execution_plans::ShuffleWriterExec;

     fn roundtrip_test(exec_plan: Arc<dyn ExecutionPlan>) -> Result<()> {
         let proto: protobuf::PhysicalPlanNode = exec_plan.clone().try_into()?;
6 changes: 3 additions & 3 deletions ballista/rust/core/src/serde/physical_plan/to_proto.rs
@@ -28,19 +28,19 @@ use std::{

 use datafusion::logical_plan::JoinType;
 use datafusion::physical_plan::coalesce_batches::CoalesceBatchesExec;
-use datafusion::physical_plan::cross_join::CrossJoinExec;
 use datafusion::physical_plan::csv::CsvExec;
 use datafusion::physical_plan::expressions::{
     CaseExpr, InListExpr, IsNotNullExpr, IsNullExpr, NegativeExpr, NotExpr,
 };
 use datafusion::physical_plan::expressions::{CastExpr, TryCastExpr};
 use datafusion::physical_plan::filter::FilterExec;
 use datafusion::physical_plan::hash_aggregate::AggregateMode;
-use datafusion::physical_plan::hash_join::{HashJoinExec, PartitionMode};
+use datafusion::physical_plan::joins::cross_join::CrossJoinExec;
+use datafusion::physical_plan::joins::hash_join::{HashJoinExec, PartitionMode};
 use datafusion::physical_plan::limit::{GlobalLimitExec, LocalLimitExec};
 use datafusion::physical_plan::parquet::{ParquetExec, ParquetPartition};
 use datafusion::physical_plan::projection::ProjectionExec;
-use datafusion::physical_plan::sort::SortExec;
+use datafusion::physical_plan::sorts::sort::SortExec;
 use datafusion::{
     physical_plan::expressions::{Count, Literal},
     scalar::ScalarValue,
4 changes: 2 additions & 2 deletions ballista/rust/core/src/utils.rs
@@ -56,10 +56,10 @@ use datafusion::physical_plan::empty::EmptyExec;
 use datafusion::physical_plan::expressions::{BinaryExpr, Column, Literal};
 use datafusion::physical_plan::filter::FilterExec;
 use datafusion::physical_plan::hash_aggregate::HashAggregateExec;
-use datafusion::physical_plan::hash_join::HashJoinExec;
+use datafusion::physical_plan::joins::hash_join::HashJoinExec;
 use datafusion::physical_plan::parquet::ParquetExec;
 use datafusion::physical_plan::projection::ProjectionExec;
-use datafusion::physical_plan::sort::SortExec;
+use datafusion::physical_plan::sorts::sort::SortExec;
 use datafusion::physical_plan::{
     metrics, AggregateExpr, ExecutionPlan, Metric, PhysicalExpr, RecordBatchStream,
 };
4 changes: 2 additions & 2 deletions ballista/rust/scheduler/src/planner.rs
@@ -251,8 +251,8 @@ mod test {
     use ballista_core::serde::protobuf;
     use datafusion::physical_plan::coalesce_batches::CoalesceBatchesExec;
     use datafusion::physical_plan::hash_aggregate::{AggregateMode, HashAggregateExec};
-    use datafusion::physical_plan::hash_join::HashJoinExec;
-    use datafusion::physical_plan::sort::SortExec;
+    use datafusion::physical_plan::joins::hash_join::HashJoinExec;
+    use datafusion::physical_plan::sorts::sort::SortExec;
     use datafusion::physical_plan::{
         coalesce_partitions::CoalescePartitionsExec, projection::ProjectionExec,
     };
7 changes: 4 additions & 3 deletions datafusion/Cargo.toml
@@ -40,7 +40,7 @@ path = "src/lib.rs"
 default = ["crypto_expressions", "regex_expressions", "unicode_expressions"]
 simd = ["arrow/simd"]
 crypto_expressions = ["md-5", "sha2"]
-regex_expressions = ["regex", "lazy_static"]
+regex_expressions = ["regex"]
 unicode_expressions = ["unicode-segmentation"]
 # Used for testing ONLY: causes all values to hash to the same value (test for collisions)
 force_hash_collisions = []
@@ -67,15 +67,16 @@ sha2 = { version = "^0.9.1", optional = true }
 ordered-float = "2.0"
 unicode-segmentation = { version = "^1.7.1", optional = true }
 regex = { version = "^1.4.3", optional = true }
-lazy_static = { version = "^1.4.0", optional = true }
+lazy_static = { version = "^1.4.0" }
 smallvec = { version = "1.6", features = ["union"] }
 rand = "0.8"
 avro-rs = { version = "0.13", features = ["snappy"], optional = true }
 num-traits = { version = "0.2", optional = true }
 uuid = { version = "0.8", features = ["v4"] }
+tempfile = "3"

 [dev-dependencies]
 criterion = "0.3"
-tempfile = "3"
 doc-comment = "0.3"
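
In short: lazy_static becomes an unconditional dependency instead of being tied to the regex_expressions feature, and tempfile is promoted from [dev-dependencies] to a regular dependency, making temporary files available outside of tests.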
4 changes: 3 additions & 1 deletion datafusion/benches/aggregate_query_sql.rs
@@ -132,5 +132,7 @@ fn criterion_benchmark(c: &mut Criterion) {
     });
 }

-criterion_group!(benches, criterion_benchmark);
+criterion_group!(name = benches;
+    config = Criterion::default().measurement_time(std::time::Duration::from_secs(30));
+    targets = criterion_benchmark);
 criterion_main!(benches);
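
The expanded criterion_group! form overrides Criterion's default measurement window. A minimal standalone sketch of the same pattern (criterion 0.3; the benchmark body here is a placeholder, the real file benchmarks aggregate SQL queries):

```rust
use criterion::{black_box, criterion_group, criterion_main, Criterion};

// Placeholder benchmark body for illustration only.
fn criterion_benchmark(c: &mut Criterion) {
    c.bench_function("noop", |b| b.iter(|| black_box(1 + 1)));
}

// The name/config/targets form lets a group override Criterion's defaults;
// here the measurement window is stretched to 30 seconds per benchmark.
criterion_group!(
    name = benches;
    config = Criterion::default().measurement_time(std::time::Duration::from_secs(30));
    targets = criterion_benchmark
);
criterion_main!(benches);
```

A longer measurement_time gives slow, noisy queries more iterations per sample, at the cost of a longer overall benchmark run.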