minor cleaning up, test IPC reader with more files
nevi-me committed Nov 19, 2019
1 parent c710076 commit 84e31b7
Showing 3 changed files with 9 additions and 11 deletions.
3 changes: 2 additions & 1 deletion rust/arrow/src/datatypes.rs
@@ -184,7 +184,8 @@ impl ArrowNativeType for u64 {

 impl ArrowNativeType for f32 {
     fn into_json_value(self) -> Option<Value> {
-        Number::from_f64(f64::round(self as f64 * 1000.0) / 1000.0).map(|num| VNumber(num))
+        Number::from_f64(f64::round(self as f64 * 1000.0) / 1000.0)
+            .map(|num| VNumber(num))
     }
 }
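
The conversion rounds to three decimal places before producing a JSON number (Number::from_f64 returns None for NaN or infinite values, hence the Option). A standalone sketch of the rounding, with hypothetical values not taken from this diff:

    // Sketch of the rounding in into_json_value: widen to f64, scale by
    // 1000, round, scale back, so at most three decimals survive.
    fn round3(v: f32) -> f64 {
        f64::round(v as f64 * 1000.0) / 1000.0
    }

    fn main() {
        assert_eq!(round3(0.1234567), 0.123);
        assert_eq!(round3(2.7184), 2.718);
    }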

11 changes: 6 additions & 5 deletions rust/arrow/src/ipc/file/reader.rs
@@ -34,7 +34,6 @@ static ARROW_MAGIC: [u8; 6] = [b'A', b'R', b'R', b'O', b'W', b'1'];
 /// Read a buffer based on offset and length
 fn read_buffer(buf: &ipc::Buffer, a_data: &Vec<u8>) -> Buffer {
     let start_offset = buf.offset() as usize;
-    dbg!(&buf);
     let end_offset = start_offset + buf.length() as usize;
     let buf_data = &a_data[start_offset..end_offset];
     Buffer::from(&buf_data)
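
read_buffer slices the message body at the offset and length recorded in the flatbuffer metadata. The same slicing in isolation, with hypothetical data:

    // Minimal sketch of the slice read_buffer takes, assuming an
    // (offset, length) pair like the one ipc::Buffer carries.
    fn slice_region(data: &[u8], offset: usize, length: usize) -> &[u8] {
        &data[offset..offset + length]
    }

    fn main() {
        let body: Vec<u8> = (0..8).collect();
        assert_eq!(slice_region(&body, 2, 3), &[2, 3, 4]);
    }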
@@ -174,7 +173,6 @@ fn create_array(
             );
             node_index = node_index + 1;
             buffer_index = buffer_index + 2;
-            dbg!((array.len(), &array));
             array
         }
     };
@@ -291,6 +289,8 @@ fn create_primitive_array(
     make_array(array_data)
 }
 
+/// Reads the correct number of buffers based on list type and null_count, and creates a
+/// list array ref
 fn create_list_array(
     field_node: &ipc::FieldNode,
     data_type: &DataType,
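
For context on the new doc comment (not part of the diff): in the Arrow IPC layout a list node carries a validity bitmap buffer and an offsets buffer, then its child's buffers, so the reader must consume a different number of buffers depending on the type and null count. A small illustration of how the offsets buffer delimits lists, with hypothetical values:

    // Hypothetical illustration: offsets [0, 2, 2, 5] encode three lists
    // covering child slices [0..2], [2..2] (empty), and [2..5].
    fn list_lengths(offsets: &[i32]) -> Vec<i32> {
        offsets.windows(2).map(|w| w[1] - w[0]).collect()
    }

    fn main() {
        assert_eq!(list_lengths(&[0, 2, 2, 5]), vec![2, 0, 3]);
    }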
@@ -328,6 +328,7 @@ fn create_list_array(
     }
 }
 
+/// Creates a record batch from binary data using the `ipc::RecordBatch` indexes and the `Schema`
 fn read_record_batch(
     buf: &Vec<u8>,
     batch: ipc::RecordBatch,
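
Once the per-field arrays are built, they are zipped with the schema into a RecordBatch. A minimal sketch of that final step, assuming the current arrow crate API (which may differ from the 2019 code in this diff) and constructing arrays directly rather than from IPC buffers:

    use arrow::array::{ArrayRef, Int32Array};
    use arrow::datatypes::{DataType, Field, Schema};
    use arrow::record_batch::RecordBatch;
    use std::sync::Arc;

    fn main() {
        let schema = Arc::new(Schema::new(vec![
            Field::new("a", DataType::Int32, false),
        ]));
        let col: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3]));
        // Columns must match the schema's fields in type and order.
        let batch = RecordBatch::try_new(schema, vec![col]).unwrap();
        assert_eq!(batch.num_rows(), 3);
    }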
@@ -530,9 +531,9 @@ mod tests {
         // the test is repetitive, thus we can read all supported files at once
         let paths = vec![
             // "generated_datetime",
-            // "generated_nested",
-            // "generated_primitive_no_batches",
-            // "generated_primitive_zerolength",
+            "generated_nested",
+            "generated_primitive_no_batches",
+            "generated_primitive_zerolength",
             "generated_primitive",
         ];
         paths.iter().for_each(|path| {
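
The for_each body is truncated above. A plausible sketch of each iteration, with a hypothetical test-data path and assumed names: Reader::try_new is an assumed constructor and ArrowJson is assumed to derive Deserialize; only ArrowJson::equals_reader is confirmed elsewhere in this diff.

    // Hypothetical loop body: open the generated .arrow file, then check
    // it against the matching integration JSON.
    let file = File::open(format!("testdata/{}.arrow_file", path)).unwrap();
    let mut reader = Reader::try_new(file).unwrap(); // assumed constructor
    let json: ArrowJson = serde_json::from_reader(
        File::open(format!("testdata/{}.json", path)).unwrap(),
    )
    .unwrap();
    assert!(json.equals_reader(&mut reader));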
6 changes: 1 addition & 5 deletions rust/arrow/src/util/integration_util.rs
@@ -63,7 +63,7 @@ struct ArrowJsonColumn {
 }
 
 impl ArrowJson {
-    // Compare the Arrow JSON with a record batch reader
+    /// Compare the Arrow JSON with a record batch reader
     pub fn equals_reader(&self, reader: &mut RecordBatchReader) -> bool {
         if !self.schema.equals_schema(&reader.schema()) {
             return false;
@@ -115,7 +115,6 @@ impl ArrowJsonBatch {
             return false;
         }
         let json_array: Vec<Value> = json_from_col(&col, field.data_type());
-        println!("Data type: {:?}", field.data_type());
         match field.data_type() {
             DataType::Boolean => {
                 let arr = arr.as_any().downcast_ref::<BooleanArray>().unwrap();
@@ -158,9 +157,6 @@
             }
             DataType::Float32 => {
                 let arr = arr.as_any().downcast_ref::<Float32Array>().unwrap();
-                dbg!(&arr);
-                dbg!(&arr.len());
-                dbg!(&json_array);
                 arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
             }
             DataType::Float64 => {
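
Each arm of the match above downcasts the type-erased ArrayRef to its concrete array before comparing against the JSON column. The pattern in isolation, assuming the current arrow crate API:

    use arrow::array::{Array, ArrayRef, Float32Array};
    use std::sync::Arc;

    fn main() {
        let arr: ArrayRef = Arc::new(Float32Array::from(vec![1.0_f32, 2.5]));
        // as_any exposes the concrete type behind the trait object;
        // downcast_ref returns None when the types do not match.
        let floats = arr.as_any().downcast_ref::<Float32Array>().unwrap();
        assert_eq!(floats.value(1), 2.5);
    }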
