From 28aad3e1cc4238238b200150c7d4775937c24c79 Mon Sep 17 00:00:00 2001
From: "Jorge C. Leitao"
Date: Tue, 21 Dec 2021 05:53:14 +0000
Subject: [PATCH] Cleaned up trait usage and added forbid_unsafe to parts

---
 src/array/map/mod.rs            | 9 ++++-----
 src/io/avro/mod.rs              | 1 +
 src/io/csv/mod.rs               | 1 +
 src/io/csv/write/serialize.rs   | 6 +++---
 src/io/ipc/read/array/binary.rs | 7 +------
 src/io/ipc/read/array/utf8.rs   | 7 +------
 src/io/json/mod.rs              | 1 +
 7 files changed, 12 insertions(+), 20 deletions(-)

diff --git a/src/array/map/mod.rs b/src/array/map/mod.rs
index 9bd05e1f5a5..f993ef6ff8d 100644
--- a/src/array/map/mod.rs
+++ b/src/array/map/mod.rs
@@ -4,7 +4,6 @@ use crate::{
     bitmap::Bitmap,
     buffer::Buffer,
     datatypes::{DataType, Field},
-    types::Index,
 };

 use super::{new_empty_array, specification::check_offsets, Array};
@@ -138,12 +137,12 @@ impl MapArray {
     pub fn value(&self, i: usize) -> Box<dyn Array> {
         let offset = self.offsets[i];
         let offset_1 = self.offsets[i + 1];
-        let length = (offset_1 - offset).to_usize();
+        let length = (offset_1 - offset) as usize;

         // Safety:
         // One of the invariants of the struct
         // is that offsets are in bounds
-        unsafe { self.field.slice_unchecked(offset.to_usize(), length) }
+        unsafe { self.field.slice_unchecked(offset as usize, length) }
     }

     /// Returns the element at index `i`.
@@ -153,9 +152,9 @@ impl MapArray {
     pub unsafe fn value_unchecked(&self, i: usize) -> Box<dyn Array> {
         let offset = *self.offsets.get_unchecked(i);
         let offset_1 = *self.offsets.get_unchecked(i + 1);
-        let length = (offset_1 - offset).to_usize();
+        let length = (offset_1 - offset) as usize;

-        self.field.slice_unchecked(offset.to_usize(), length)
+        self.field.slice_unchecked(offset as usize, length)
     }
 }

diff --git a/src/io/avro/mod.rs b/src/io/avro/mod.rs
index fe9eb8bd1d3..d5a69ab3557 100644
--- a/src/io/avro/mod.rs
+++ b/src/io/avro/mod.rs
@@ -1,4 +1,5 @@
 #![deny(missing_docs)]
+#![forbid(unsafe_code)]
 //! Read and write from and to Apache Avro

 pub mod read;
diff --git a/src/io/csv/mod.rs b/src/io/csv/mod.rs
index 00b14185051..037e243b8a1 100644
--- a/src/io/csv/mod.rs
+++ b/src/io/csv/mod.rs
@@ -1,4 +1,5 @@
 #![deny(missing_docs)]
+#![forbid(unsafe_code)]
 //! Convert data between the Arrow and CSV (comma-separated values).

 use crate::error::ArrowError;
diff --git a/src/io/csv/write/serialize.rs b/src/io/csv/write/serialize.rs
index 959c1f56b1b..f2552832dcb 100644
--- a/src/io/csv/write/serialize.rs
+++ b/src/io/csv/write/serialize.rs
@@ -2,7 +2,7 @@ use lexical_core::ToLexical;

 use crate::datatypes::IntegerType;
 use crate::temporal_conversions;
-use crate::types::{Index, NativeType};
+use crate::types::NativeType;
 use crate::util::lexical_to_bytes_mut;
 use crate::{
     array::{Array, BinaryArray, BooleanArray, PrimitiveArray, Utf8Array},
@@ -405,7 +405,7 @@ pub fn new_serializer<'a>(
 /// Helper for serializing a dictonary array. The generic parameters are:
 /// - `K` for the type of the keys of the dictionary
 /// - `O` for the type of the offsets in the Utf8Array: {i32, i64}
-fn serialize_utf8_dict<'a, K: DictionaryKey + Index, O: Offset>(
+fn serialize_utf8_dict<'a, K: DictionaryKey, O: Offset>(
     array: &'a dyn Any,
 ) -> Box<dyn StreamingIterator<Item = [u8]> + 'a> {
     let array = array.downcast_ref::<DictionaryArray<K>>().unwrap();
@@ -419,7 +419,7 @@ fn serialize_utf8_dict<'a, K: DictionaryKey + Index, O: Offset>(
         keys.iter(),
         move |x, buf| {
             if let Some(x) = x {
-                let i = Index::to_usize(x);
+                let i = x.to_usize().unwrap();
                 if !values.is_null(i) {
                     let val = values.value(i);
                     buf.extend_from_slice(val.as_bytes());
diff --git a/src/io/ipc/read/array/binary.rs b/src/io/ipc/read/array/binary.rs
index 485e2451637..98459ea5f5b 100644
--- a/src/io/ipc/read/array/binary.rs
+++ b/src/io/ipc/read/array/binary.rs
@@ -1,5 +1,4 @@
 use std::collections::VecDeque;
-use std::convert::TryInto;
 use std::io::{Read, Seek};

 use arrow_format::ipc;
@@ -8,7 +7,6 @@ use crate::array::{BinaryArray, Offset};
 use crate::buffer::Buffer;
 use crate::datatypes::DataType;
 use crate::error::Result;
-use crate::types::NativeType;

 use super::super::deserialize::Node;
 use super::super::read_basic::*;
@@ -21,10 +19,7 @@ pub fn read_binary<O: Offset, R: Read + Seek>(
     block_offset: u64,
     is_little_endian: bool,
     compression: Option<ipc::Message::BodyCompression>,
-) -> Result<BinaryArray<O>>
-where
-    Vec<u8>: TryInto<O::Bytes> + TryInto<<O as NativeType>::Bytes>,
-{
+) -> Result<BinaryArray<O>> {
     let field_node = field_nodes.pop_front().unwrap();

     let validity = read_validity(
diff --git a/src/io/ipc/read/array/utf8.rs b/src/io/ipc/read/array/utf8.rs
index 93d024b9f13..a997e9effa3 100644
--- a/src/io/ipc/read/array/utf8.rs
+++ b/src/io/ipc/read/array/utf8.rs
@@ -1,5 +1,4 @@
 use std::collections::VecDeque;
-use std::convert::TryInto;
 use std::io::{Read, Seek};

 use arrow_format::ipc;
@@ -8,7 +7,6 @@ use crate::array::{Offset, Utf8Array};
 use crate::buffer::Buffer;
 use crate::datatypes::DataType;
 use crate::error::Result;
-use crate::types::NativeType;

 use super::super::deserialize::Node;
 use super::super::read_basic::*;
@@ -21,10 +19,7 @@ pub fn read_utf8<O: Offset, R: Read + Seek>(
     block_offset: u64,
     is_little_endian: bool,
     compression: Option<ipc::Message::BodyCompression>,
-) -> Result<Utf8Array<O>>
-where
-    Vec<u8>: TryInto<O::Bytes> + TryInto<<O as NativeType>::Bytes>,
-{
+) -> Result<Utf8Array<O>> {
     let field_node = field_nodes.pop_front().unwrap();

     let validity = read_validity(
diff --git a/src/io/json/mod.rs b/src/io/json/mod.rs
index 46742b41711..e5acb380a90 100644
--- a/src/io/json/mod.rs
+++ b/src/io/json/mod.rs
@@ -1,4 +1,5 @@
 #![deny(missing_docs)]
+#![forbid(unsafe_code)]
 //! Convert data between the Arrow memory format and JSON line-delimited records.

 mod read;
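
A minimal standalone sketch, not part of the patch, of what the added `#![forbid(unsafe_code)]` lint level enforces; the crate layout and function names below are hypothetical, not from arrow2. Unlike `deny`, `forbid` also rejects any inner `#[allow(unsafe_code)]`, so no submodule of the avro/csv/json io trees can quietly reintroduce `unsafe` blocks.

// lib.rs of a hypothetical crate, illustrating the crate-level lint used in the patch.
#![forbid(unsafe_code)]

/// Safe indexing: returns None instead of reaching for unchecked pointer access.
pub fn first_byte(bytes: &[u8]) -> Option<u8> {
    bytes.first().copied()
}

// The following would not compile under `forbid`: the `allow` attribute itself
// is rejected ("allow(unsafe_code) incompatible with previous forbid"), and the
// `unsafe` block is reported as an error as well.
//
// #[allow(unsafe_code)]
// pub fn first_byte_unchecked(bytes: &[u8]) -> u8 {
//     unsafe { *bytes.get_unchecked(0) }
// }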