From 029535b102c484afb8bea2c0c524bc5189dc154f Mon Sep 17 00:00:00 2001
From: Patrick Peglar
Date: Mon, 17 May 2021 18:19:17 +0100
Subject: [PATCH] Remove workaround when dask-wrapping PP data, obsoleted by
 #4135.

---
 lib/iris/fileformats/pp.py | 30 ++++++++-----------
 lib/iris/util.py           | 61 --------------------------------------
 2 files changed, 12 insertions(+), 79 deletions(-)

diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py
index 406da925b1..7589d27922 100644
--- a/lib/iris/fileformats/pp.py
+++ b/lib/iris/fileformats/pp.py
@@ -38,7 +38,6 @@
 )
 import iris.fileformats.rules
 import iris.coord_systems
-from iris.util import _array_slice_ifempty
 
 try:
     import mo_pack
@@ -594,23 +593,18 @@ def ndim(self):
         return len(self.shape)
 
     def __getitem__(self, keys):
-        # Check for 'empty' slicings, in which case don't fetch the data.
-        # Because, since Dask v2, 'dask.array.from_array' performs an empty
-        # slicing and we must not fetch the data at that time.
-        result = _array_slice_ifempty(keys, self.shape, self.dtype)
-        if result is None:
-            with open(self.path, "rb") as pp_file:
-                pp_file.seek(self.offset, os.SEEK_SET)
-                data_bytes = pp_file.read(self.data_len)
-                data = _data_bytes_to_shaped_array(
-                    data_bytes,
-                    self.lbpack,
-                    self.boundary_packing,
-                    self.shape,
-                    self.src_dtype,
-                    self.mdi,
-                )
-            result = data.__getitem__(keys)
+        with open(self.path, "rb") as pp_file:
+            pp_file.seek(self.offset, os.SEEK_SET)
+            data_bytes = pp_file.read(self.data_len)
+            data = _data_bytes_to_shaped_array(
+                data_bytes,
+                self.lbpack,
+                self.boundary_packing,
+                self.shape,
+                self.src_dtype,
+                self.mdi,
+            )
+        result = data.__getitem__(keys)
 
         return np.asanyarray(result, dtype=self.dtype)
 
diff --git a/lib/iris/util.py b/lib/iris/util.py
index d2fda82c0f..aea07a877c 100644
--- a/lib/iris/util.py
+++ b/lib/iris/util.py
@@ -960,67 +960,6 @@ def __lt__(self, other):
         return NotImplemented
 
 
-def _array_slice_ifempty(keys, shape, dtype):
-    """
-    Detect cases where an array slice will contain no data, as it contains a
-    zero-length dimension, and produce an equivalent result for those cases.
-
-    The function indicates 'empty' slicing cases, by returning an array equal
-    to the slice result in those cases.
-
-    Args:
-
-    * keys (indexing key, or tuple of keys):
-        The argument from an array __getitem__ call.
-        Only tuples of integers and slices are supported, in particular no
-        newaxis, ellipsis or array keys.
-        These are the types of array access usage we expect from Dask.
-    * shape (tuple of int):
-        The shape of the array being indexed.
-    * dtype (numpy.dtype):
-        The dtype of the array being indexed.
-
-    Returns:
-        result (np.ndarray or None):
-            If 'keys' contains a slice(0, 0), this is an ndarray of the correct
-            resulting shape and provided dtype.
-            Otherwise it is None.
-
-    .. note::
-
-        This is used to prevent DataProxy arraylike objects from fetching their
-        file data when wrapped as Dask arrays.
-        This is because, for Dask >= 2.0, the "dask.array.from_array" call
-        performs a fetch like [0:0, 0:0, ...], to 'snapshot' array metadata.
-        This function enables us to avoid triggering a file data fetch in those
-        cases : This is consistent because the result will not contain any
-        actual data content.
-
-    """
-    # Convert a single key into a 1-tuple, so we always have a tuple of keys.
-    if isinstance(keys, tuple):
-        keys_tuple = keys
-    else:
-        keys_tuple = (keys,)
-
-    if any(key == slice(0, 0) for key in keys_tuple):
-        # An 'empty' slice is present : Return a 'fake' array instead.
-        target_shape = list(shape)
-        for i_dim, key in enumerate(keys_tuple):
-            if key == slice(0, 0):
-                # Reduce dims with empty slicing to length 0.
-                target_shape[i_dim] = 0
-        # Create a prototype result : no memory usage, as some dims are 0.
-        result = np.zeros(target_shape, dtype=dtype)
-        # Index with original keys to produce the desired result shape.
-        # Note : also ok in 0-length dims, as the slice is always '0:0'.
-        result = result[keys]
-    else:
-        result = None
-
-    return result
-
-
 def create_temp_filename(suffix=""):
     """Return a temporary file name.
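
Note (not part of the patch): a minimal sketch of the Dask behaviour the removed workaround guarded against, and of the mechanism that makes it unnecessary. It assumes that Dask >= 2 probes an unfamiliar array-like with an all-empty slice (e.g. [0:0, 0:0]) when "dask.array.from_array" builds its metadata, and that supplying the "meta=" keyword skips that probe. The RecordingProxy class is a hypothetical stand-in for the real file-backed PPDataProxy; presumably #4135 arranges for an equivalent meta to be passed when Iris wraps its data proxies, which is why the empty-slice guard could be deleted.

import dask.array as da
import numpy as np


class RecordingProxy:
    """Hypothetical array-like: records each __getitem__ call instead of
    reading from a file, so we can see when Dask touches the data."""

    def __init__(self, shape, dtype):
        self.shape = shape
        self.dtype = np.dtype(dtype)
        self.ndim = len(shape)
        self.calls = []

    def __getitem__(self, keys):
        self.calls.append(keys)
        # Stand-in for the expensive file read done by a real data proxy.
        return np.zeros(self.shape, dtype=self.dtype)[keys]


proxy = RecordingProxy((3, 4), "f4")

# Without `meta`, from_array may inspect the proxy with an empty slice at
# wrap time -- the access that _array_slice_ifempty used to intercept.
da.from_array(proxy, chunks=(3, 4))
print("probe calls without meta:", proxy.calls)

proxy.calls.clear()

# Supplying `meta` describes the element type up front, so no probe is
# needed and the proxy is only sliced when the array is actually computed.
da.from_array(proxy, chunks=(3, 4), meta=np.empty((0, 0), dtype=proxy.dtype))
print("probe calls with meta:", proxy.calls)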