diff --git a/spectral_cube/dask_spectral_cube.py b/spectral_cube/dask_spectral_cube.py index 13179bd1..7f8ec57b 100644 --- a/spectral_cube/dask_spectral_cube.py +++ b/spectral_cube/dask_spectral_cube.py @@ -287,15 +287,6 @@ def __exit__(self, *args): def _compute(self, array): return array.compute(**self._scheduler_kwargs) - def _warn_slow(self, funcname): - if self._is_huge and not self.allow_huge_operations: - raise ValueError("This function ({0}) requires loading the entire " - "cube into memory, and the cube is large ({1} " - "pixels), so by default we disable this operation. " - "To enable the operation, set " - "`cube.allow_huge_operations=True` and try again." - .format(funcname, self.size)) - def _get_filled_data(self, view=(), fill=np.nan, check_endian=None, use_memmap=None): if check_endian: diff --git a/spectral_cube/spectral_cube.py b/spectral_cube/spectral_cube.py index 208011e4..81271c1a 100644 --- a/spectral_cube/spectral_cube.py +++ b/spectral_cube/spectral_cube.py @@ -1191,6 +1191,17 @@ def median(self, axis=None, iterate_rays=False, **kwargs): except ImportError: bnok = False + how = kwargs.pop('how', None) + if how == 'slice' and (not isinstance(axis, (list, tuple)) or len(axis) != 2): + raise ValueError("Cannot compute median slicewise unless you're compressing over two axes.") + elif how == 'ray': + if axis not in (0, 1, 2): + raise ValueError("Cannot compute median raywise unless you're compressing over one axis.") + else: + if not iterate_rays: + iterate_rays = True + warnings.warn("how='ray' was specified in call to median; this is setting iterate_rays=True") + # slicewise median is nonsense, must force how = 'cube' # bottleneck.nanmedian does not allow axis to be a list or tuple if bnok and not iterate_rays and not isinstance(axis, (list, tuple)): @@ -1203,7 +1214,12 @@ def median(self, axis=None, iterate_rays=False, **kwargs): log.debug("Using numpy nanmedian") result = self.apply_numpy_function(np.nanmedian, axis=axis, projection=True, 
unit=self.unit, - how='cube',**kwargs) + how='cube', **kwargs) + elif iterate_rays: + result = self.apply_numpy_function( + nanmedian if bnok else np.nanmedian if hasattr(np, 'nanmedian') else np.median, + axis=axis, projection=True, unit=self.unit, how='ray', + check_endian=True, **kwargs) else: log.debug("Using numpy median iterating over rays") result = self.apply_function(np.median, projection=True, axis=axis, diff --git a/spectral_cube/utils.py b/spectral_cube/utils.py index ad4200f2..58b03176 100644 --- a/spectral_cube/utils.py +++ b/spectral_cube/utils.py @@ -1,4 +1,5 @@ import warnings +import inspect from functools import wraps @@ -33,19 +34,26 @@ def warn_slow(function): @wraps(function) def wrapper(self, *args, **kwargs): # if the function accepts a 'how', the 'cube' approach requires the whole cube in memory - warn_how = (kwargs.get('how') == 'cube') or 'how' not in kwargs - if self._is_huge and not self.allow_huge_operations and warn_how: - raise ValueError("This function ({0}) requires loading the entire " - "cube into memory, and the cube is large ({1} " - "pixels), so by default we disable this operation. " - "To enable the operation, set " - "`cube.allow_huge_operations=True` and try again. " - "Alternatively, you may want to consider using an " - "approach that does not load the whole cube into " - "memory by specifying how='slice' or how='ray'. " - "See {bigdataurl} for details." - .format(str(function), self.size, - bigdataurl=bigdataurl)) + argspec = inspect.getfullargspec(function) + accepts_how_keyword = 'how' in argspec.args or argspec.varkw is not None + + warn_how = accepts_how_keyword and ((kwargs.get('how') == 'cube') or 'how' not in kwargs) + + if self._is_huge and not self.allow_huge_operations: + warn_message = ("This function ({0}) requires loading the entire " + "cube into memory, and the cube is large ({1} " + "pixels), so by default we disable this operation. 
" + "To enable the operation, set " + "`cube.allow_huge_operations=True` and try again. ").format(str(function), self.size) + + if warn_how: + warn_message += ("Alternatively, you may want to consider using an " + "approach that does not load the whole cube into " + "memory by specifying how='slice' or how='ray'. ") + + warn_message += ("See {bigdataurl} for details.".format(bigdataurl=bigdataurl)) + + raise ValueError(warn_message) elif warn_how and not self._is_huge: # TODO: add check for whether cube has been loaded into memory warnings.warn("This function ({0}) requires loading the entire cube into "