REF: de-duplicate IntervalIndex compat code (pandas-dev#36372)
jbrockmendel authored and Kevin D Smith committed Nov 2, 2020
1 parent 934cf9c commit bbd3d23
Showing 3 changed files with 17 additions and 36 deletions.
17 changes: 13 additions & 4 deletions pandas/core/indexes/base.py
@@ -3316,7 +3316,7 @@ def _can_reindex(self, indexer):
         ValueError if its a duplicate axis
         """
         # trying to reindex on an axis with duplicates
-        if not self.is_unique and len(indexer):
+        if not self._index_as_unique and len(indexer):
             raise ValueError("cannot reindex from a duplicate axis")
 
     def reindex(self, target, method=None, level=None, limit=None, tolerance=None):
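Note: the guard now consults _index_as_unique rather than is_unique, so the same base-class check covers an axis with duplicate labels and, via the IntervalIndex override added further down, an overlapping interval axis. A standalone sketch of the user-visible behaviour, assuming a pandas build that includes this commit:

import pandas as pd

# Reindexing from an axis with duplicate labels is still rejected by _can_reindex.
s = pd.Series([1, 2, 3], index=["a", "a", "b"])
try:
    s.reindex(["a", "b"])
except ValueError as err:
    print(err)  # cannot reindex from a duplicate axis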
@@ -3360,8 +3360,7 @@ def reindex(self, target, method=None, level=None, limit=None, tolerance=None):
         if self.equals(target):
             indexer = None
         else:
-            # check is_overlapping for IntervalIndex compat
-            if self.is_unique and not getattr(self, "is_overlapping", False):
+            if self._index_as_unique:
                 indexer = self.get_indexer(
                     target, method=method, limit=limit, tolerance=tolerance
                 )
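Note: the inline getattr check for is_overlapping was the IntervalIndex special case that this commit folds into the new property. A rough sketch of the equivalence, using hypothetical helper names that are not part of pandas and assuming a build that includes this commit:

import pandas as pd

def old_predicate(index: pd.Index) -> bool:
    # the pre-refactor inline check, reproduced for comparison
    return index.is_unique and not getattr(index, "is_overlapping", False)

def new_predicate(index: pd.Index) -> bool:
    # the post-refactor check: a single property, overridden per index type
    return index._index_as_unique

for idx in (
    pd.Index([1, 2, 3]),                             # unique plain Index
    pd.Index([1, 1, 2]),                             # duplicated plain Index
    pd.IntervalIndex.from_tuples([(0, 2), (1, 3)]),  # overlapping IntervalIndex
):
    print(old_predicate(idx), new_predicate(idx))    # the two columns agree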
@@ -4759,11 +4758,21 @@ def get_indexer_for(self, target, **kwargs):
         numpy.ndarray
             List of indices.
         """
-        if self.is_unique:
+        if self._index_as_unique:
             return self.get_indexer(target, **kwargs)
         indexer, _ = self.get_indexer_non_unique(target, **kwargs)
         return indexer
 
+    @property
+    def _index_as_unique(self):
+        """
+        Whether we should treat this as unique for the sake of
+        get_indexer vs get_indexer_non_unique.
+
+        For IntervalIndex compat.
+        """
+        return self.is_unique
+
     def _maybe_promote(self, other: "Index"):
         """
         When dealing with an object-dtype Index and a non-object Index, see
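The new property is the single switch that get_indexer_for (and the callers changed below) consult. A quick standalone demonstration, assuming a pandas build that includes this commit:

import pandas as pd

unique_idx = pd.Index(["a", "b", "c"])
print(unique_idx._index_as_unique)             # True -> get_indexer path
print(unique_idx.get_indexer_for(["b", "c"]))  # [1 2]

dup_idx = pd.Index(["a", "b", "b"])
print(dup_idx._index_as_unique)                # False -> get_indexer_non_unique path
print(dup_idx.get_indexer_for(["b"]))          # [1 2], one entry per matching position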
34 changes: 3 additions & 31 deletions pandas/core/indexes/interval.py
@@ -516,22 +516,6 @@ def is_overlapping(self) -> bool:
         # GH 23309
         return self._engine.is_overlapping
 
-    def _can_reindex(self, indexer: np.ndarray) -> None:
-        """
-        Check if we are allowing reindexing with this particular indexer.
-
-        Parameters
-        ----------
-        indexer : an integer indexer
-
-        Raises
-        ------
-        ValueError if its a duplicate axis
-        """
-        # trying to reindex on an axis with duplicates
-        if self.is_overlapping and len(indexer):
-            raise ValueError("cannot reindex from an overlapping axis")
-
     def _needs_i8_conversion(self, key) -> bool:
         """
         Check if a given key needs i8 conversion. Conversion is necessary for
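Note: with the interval-specific _can_reindex override removed, an overlapping IntervalIndex reports _index_as_unique as False and is rejected by the shared guard in base.py instead. A hedged sketch of that path (the exception type should be unchanged, though the message would now come from the base-class check rather than the removed "overlapping axis" one):

import pandas as pd

s = pd.Series([10, 20], index=pd.IntervalIndex.from_tuples([(0, 2), (1, 3)]))
try:
    s.reindex([1.5])  # 1.5 falls in both intervals
except ValueError as err:
    print(err)  # raised by the shared Index._can_reindex guard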
@@ -839,21 +823,9 @@ def get_indexer_non_unique(
 
         return ensure_platform_int(indexer), ensure_platform_int(missing)
 
-    def get_indexer_for(self, target: AnyArrayLike, **kwargs) -> np.ndarray:
-        """
-        Guaranteed return of an indexer even when overlapping.
-
-        This dispatches to get_indexer or get_indexer_non_unique
-        as appropriate.
-
-        Returns
-        -------
-        numpy.ndarray
-            List of indices.
-        """
-        if self.is_overlapping:
-            return self.get_indexer_non_unique(target)[0]
-        return self.get_indexer(target, **kwargs)
+    @property
+    def _index_as_unique(self):
+        return not self.is_overlapping
 
     def _convert_slice_indexer(self, key: slice, kind: str):
         if not (key.step is None or key.step == 1):
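The three-line property is all IntervalIndex needs now; the base-class get_indexer_for does the dispatch. A standalone illustration, assuming a pandas build that includes this commit:

import pandas as pd

ii = pd.IntervalIndex.from_tuples([(0, 2), (1, 3), (4, 5)])
print(ii.is_overlapping)          # True: (0, 2] and (1, 3] share (1, 2]
print(ii._index_as_unique)        # False, so get_indexer_non_unique is used
print(ii.get_indexer_for([1.5]))  # [0 1]: both overlapping intervals contain 1.5

flat = pd.IntervalIndex.from_breaks([0, 1, 2, 3])
print(flat.is_overlapping)        # False
print(flat._index_as_unique)      # True, so plain get_indexer is used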
2 changes: 1 addition & 1 deletion pandas/core/indexing.py
@@ -1256,7 +1256,7 @@ def _get_listlike_indexer(self, key, axis: int, raise_missing: bool = False):
             )
             return ax[indexer], indexer
 
-        if ax.is_unique and not getattr(ax, "is_overlapping", False):
+        if ax._index_as_unique:
             indexer = ax.get_indexer_for(keyarr)
             keyarr = ax.reindex(keyarr)[0]
         else:
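The same property also drives list-based .loc lookups through _get_listlike_indexer. A standalone example of the non-unique branch, assuming a pandas build that includes this commit:

import pandas as pd

s = pd.Series([10, 20, 30], index=["a", "b", "b"])
# "b" is duplicated, so ax._index_as_unique is False and .loc takes the
# non-unique path, returning every row whose label matches.
print(s.loc[["b"]])
# b    20
# b    30
# dtype: int64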
