CLN: update Appender to doc with case __doc__ (pandas-dev#32956)
HH-MWB authored Mar 27, 2020
1 parent 48ddfbb commit e88c392
Showing 8 changed files with 39 additions and 39 deletions.
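
The change is mechanical: every method that previously borrowed another function's docstring via @Appender(other.__doc__) now uses @doc(other) from pandas.util._decorators. As a rough illustration of why the two spellings are interchangeable for this case, here is a minimal sketch of a docstring-copying decorator; copy_doc and the Frame class are hypothetical stand-ins for this example only, not pandas' actual doc implementation (which also supports docstring templating).

    from typing import Callable, TypeVar

    F = TypeVar("F", bound=Callable)

    def copy_doc(source: Callable) -> Callable[[F], F]:
        """Return a decorator that copies ``source``'s docstring onto its target."""

        def decorator(target: F) -> F:
            # Same net effect as @Appender(source.__doc__) on an undocumented target.
            target.__doc__ = source.__doc__
            return target

        return decorator

    class Frame:
        def items(self):
            """Iterate over (label, values) pairs."""

        # The pattern applied in this commit: iteritems inherits the
        # docstring of items via the decorator instead of __doc__ access.
        @copy_doc(items)
        def iteritems(self):
            return self.items()

    assert Frame.iteritems.__doc__ == Frame.items.__doc__

The diff below applies this swap uniformly and updates the corresponding imports in each touched module.
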
2 changes: 1 addition & 1 deletion pandas/core/frame.py
@@ -2300,7 +2300,7 @@ def to_html(
)

# ----------------------------------------------------------------------
- @Appender(info.__doc__)
+ @doc(info)
def info(
self, verbose=None, buf=None, max_cols=None, memory_usage=None, null_counts=None
) -> None:
8 changes: 4 additions & 4 deletions pandas/core/generic.py
@@ -1723,7 +1723,7 @@ def items(self):
for h in self._info_axis:
yield h, self[h]

- @Appender(items.__doc__)
+ @doc(items)
def iteritems(self):
return self.items()

@@ -10222,7 +10222,7 @@ def _add_series_or_dataframe_operations(cls):
"""
from pandas.core.window import EWM, Expanding, Rolling, Window

- @Appender(Rolling.__doc__)
+ @doc(Rolling)
def rolling(
self,
window,
@@ -10260,14 +10260,14 @@ def rolling(

cls.rolling = rolling

- @Appender(Expanding.__doc__)
+ @doc(Expanding)
def expanding(self, min_periods=1, center=False, axis=0):
axis = self._get_axis_number(axis)
return Expanding(self, min_periods=min_periods, center=center, axis=axis)

cls.expanding = expanding

- @Appender(EWM.__doc__)
+ @doc(EWM)
def ewm(
self,
com=None,
20 changes: 10 additions & 10 deletions pandas/core/indexes/multi.py
@@ -21,7 +21,7 @@
from pandas._typing import AnyArrayLike, Scalar
from pandas.compat.numpy import function as nv
from pandas.errors import PerformanceWarning, UnsortedIndexError
- from pandas.util._decorators import Appender, cache_readonly
+ from pandas.util._decorators import Appender, cache_readonly, doc

from pandas.core.dtypes.cast import coerce_indexer_dtype
from pandas.core.dtypes.common import (
@@ -986,7 +986,7 @@ def _engine(self):
def _constructor(self):
return MultiIndex.from_tuples

- @Appender(Index._shallow_copy.__doc__)
+ @doc(Index._shallow_copy)
def _shallow_copy(
self,
values=None,
@@ -1098,7 +1098,7 @@ def view(self, cls=None):
result._id = self._id
return result

- @Appender(Index.__contains__.__doc__)
+ @doc(Index.__contains__)
def __contains__(self, key: Any) -> bool:
hash(key)
try:
@@ -1119,7 +1119,7 @@ def f(l):

return any(f(l) for l in self._inferred_type_levels)

- @Appender(Index.memory_usage.__doc__)
+ @doc(Index.memory_usage)
def memory_usage(self, deep: bool = False) -> int:
# we are overwriting our base class to avoid
# computing .values here which could materialize
@@ -1351,7 +1351,7 @@ def _set_names(self, names, level=None, validate=True):

# --------------------------------------------------------------------

- @Appender(Index._get_grouper_for_level.__doc__)
+ @doc(Index._get_grouper_for_level)
def _get_grouper_for_level(self, mapper, level):
indexer = self.codes[level]
level_index = self.levels[level]
@@ -1462,7 +1462,7 @@ def _inferred_type_levels(self):
""" return a list of the inferred types, one for each level """
return [i.inferred_type for i in self.levels]

- @Appender(Index.duplicated.__doc__)
+ @doc(Index.duplicated)
def duplicated(self, keep="first"):
shape = map(len, self.levels)
ids = get_group_index(self.codes, shape, sort=False, xnull=False)
@@ -1475,7 +1475,7 @@ def fillna(self, value=None, downcast=None):
"""
raise NotImplementedError("isna is not defined for MultiIndex")

- @Appender(Index.dropna.__doc__)
+ @doc(Index.dropna)
def dropna(self, how="any"):
nans = [level_codes == -1 for level_codes in self.codes]
if how == "any":
@@ -1548,7 +1548,7 @@ def get_level_values(self, level):
values = self._get_level_values(level)
return values

- @Appender(Index.unique.__doc__)
+ @doc(Index.unique)
def unique(self, level=None):

if level is None:
@@ -3423,7 +3423,7 @@ def _convert_can_do_setop(self, other):

# --------------------------------------------------------------------

- @Appender(Index.astype.__doc__)
+ @doc(Index.astype)
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if is_categorical_dtype(dtype):
@@ -3498,7 +3498,7 @@ def _wrap_joined_index(self, joined, other):
names = self.names if self.names == other.names else None
return MultiIndex.from_tuples(joined, names=names)

- @Appender(Index.isin.__doc__)
+ @doc(Index.isin)
def isin(self, values, level=None):
if level is None:
values = MultiIndex.from_tuples(values, names=self.names)._values
22 changes: 11 additions & 11 deletions pandas/core/indexes/numeric.py
@@ -4,7 +4,7 @@

from pandas._libs import index as libindex, lib
from pandas._typing import Dtype, Label
- from pandas.util._decorators import Appender, cache_readonly
+ from pandas.util._decorators import cache_readonly, doc

from pandas.core.dtypes.cast import astype_nansafe
from pandas.core.dtypes.common import (
@@ -95,14 +95,14 @@ def _validate_dtype(cls, dtype: Dtype) -> None:
f"Incorrect `dtype` passed: expected {expected}, received {dtype}"
)

- @Appender(Index._maybe_cast_slice_bound.__doc__)
+ @doc(Index._maybe_cast_slice_bound)
def _maybe_cast_slice_bound(self, label, side, kind):
assert kind in ["loc", "getitem", None]

# we will try to coerce to integers
return self._maybe_cast_indexer(label)

- @Appender(Index._shallow_copy.__doc__)
+ @doc(Index._shallow_copy)
def _shallow_copy(self, values=None, name: Label = lib.no_default):
if values is not None and not self._can_hold_na and values.dtype.kind == "f":
name = self.name if name is lib.no_default else name
@@ -158,7 +158,7 @@ def is_all_dates(self) -> bool:
"""
return False

- @Appender(Index.insert.__doc__)
+ @doc(Index.insert)
def insert(self, loc: int, item):
# treat NA values as nans:
if is_scalar(item) and isna(item):
@@ -295,7 +295,7 @@ class UInt64Index(IntegerIndex):
_engine_type = libindex.UInt64Engine
_default_dtype = np.dtype(np.uint64)

- @Appender(Index._convert_arr_indexer.__doc__)
+ @doc(Index._convert_arr_indexer)
def _convert_arr_indexer(self, keyarr):
# Cast the indexer to uint64 if possible so that the values returned
# from indexing are also uint64.
@@ -307,7 +307,7 @@ def _convert_arr_indexer(self, keyarr):

return com.asarray_tuplesafe(keyarr, dtype=dtype)

- @Appender(Index._convert_index_indexer.__doc__)
+ @doc(Index._convert_index_indexer)
def _convert_index_indexer(self, keyarr):
# Cast the indexer to uint64 if possible so
# that the values returned from indexing are
@@ -357,7 +357,7 @@ def inferred_type(self) -> str:
"""
return "floating"

- @Appender(Index.astype.__doc__)
+ @doc(Index.astype)
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if needs_i8_conversion(dtype):
@@ -375,11 +375,11 @@ def astype(self, dtype, copy=True):
# ----------------------------------------------------------------
# Indexing Methods

- @Appender(Index._should_fallback_to_positional.__doc__)
+ @doc(Index._should_fallback_to_positional)
def _should_fallback_to_positional(self):
return False

- @Appender(Index._convert_slice_indexer.__doc__)
+ @doc(Index._convert_slice_indexer)
def _convert_slice_indexer(self, key: slice, kind: str):
assert kind in ["loc", "getitem"]

@@ -433,7 +433,7 @@ def __contains__(self, other: Any) -> bool:

return is_float(other) and np.isnan(other) and self.hasnans

- @Appender(Index.get_loc.__doc__)
+ @doc(Index.get_loc)
def get_loc(self, key, method=None, tolerance=None):
if is_bool(key):
# Catch this to avoid accidentally casting to 1.0
@@ -453,7 +453,7 @@ def get_loc(self, key, method=None, tolerance=None):
def is_unique(self) -> bool:
return super().is_unique and self._nan_idxs.size < 2

- @Appender(Index.isin.__doc__)
+ @doc(Index.isin)
def isin(self, values, level=None):
if level is not None:
self._validate_index_level(level)
6 changes: 3 additions & 3 deletions pandas/core/indexes/period.py
@@ -10,7 +10,7 @@
from pandas._libs.tslibs.parsing import parse_time_string
from pandas._libs.tslibs.period import Period
from pandas._typing import DtypeObj, Label
- from pandas.util._decorators import Appender, cache_readonly
+ from pandas.util._decorators import Appender, cache_readonly, doc

from pandas.core.dtypes.common import (
ensure_platform_int,
@@ -327,7 +327,7 @@ def _engine(self):
period = weakref.ref(self)
return self._engine_type(period, len(self))

- @Appender(Index.__contains__.__doc__)
+ @doc(Index.__contains__)
def __contains__(self, key: Any) -> bool:
if isinstance(key, Period):
if key.freq != self.freq:
@@ -405,7 +405,7 @@ def asof_locs(self, where, mask: np.ndarray) -> np.ndarray:

return result

- @Appender(Index.astype.__doc__)
+ @doc(Index.astype)
def astype(self, dtype, copy=True, how="start"):
dtype = pandas_dtype(dtype)

10 changes: 5 additions & 5 deletions pandas/core/indexes/range.py
@@ -11,7 +11,7 @@
from pandas._typing import Label
import pandas.compat as compat
from pandas.compat.numpy import function as nv
- from pandas.util._decorators import Appender, cache_readonly
+ from pandas.util._decorators import Appender, cache_readonly, doc

from pandas.core.dtypes.common import (
ensure_platform_int,
@@ -342,7 +342,7 @@ def __contains__(self, key: Any) -> bool:
return False
return key in self._range

- @Appender(Int64Index.get_loc.__doc__)
+ @doc(Int64Index.get_loc)
def get_loc(self, key, method=None, tolerance=None):
if method is None and tolerance is None:
if is_integer(key) or (is_float(key) and key.is_integer()):
@@ -386,7 +386,7 @@ def get_indexer(self, target, method=None, limit=None, tolerance=None):
def tolist(self):
return list(self._range)

- @Appender(Int64Index._shallow_copy.__doc__)
+ @doc(Int64Index._shallow_copy)
def _shallow_copy(self, values=None, name: Label = no_default):
name = self.name if name is no_default else name

@@ -397,7 +397,7 @@ def _shallow_copy(self, values=None, name: Label = no_default):
else:
return Int64Index._simple_new(values, name=name)

- @Appender(Int64Index.copy.__doc__)
+ @doc(Int64Index.copy)
def copy(self, name=None, deep=False, dtype=None, **kwargs):
self._validate_dtype(dtype)
if name is None:
@@ -619,7 +619,7 @@ def _union(self, other, sort):
return type(self)(start_r, end_r + step_o, step_o)
return self._int64index._union(other, sort=sort)

- @Appender(Int64Index.join.__doc__)
+ @doc(Int64Index.join)
def join(self, other, how="left", level=None, return_indexers=False, sort=False):
if how == "outer" and self is not other:
# note: could return RangeIndex in more circumstances
4 changes: 2 additions & 2 deletions pandas/core/indexes/timedeltas.py
@@ -2,7 +2,7 @@

from pandas._libs import NaT, Timedelta, index as libindex
from pandas._typing import DtypeObj, Label
- from pandas.util._decorators import Appender
+ from pandas.util._decorators import doc

from pandas.core.dtypes.common import (
_TD_DTYPE,
@@ -195,7 +195,7 @@ def _formatter_func(self):

# -------------------------------------------------------------------

- @Appender(Index.astype.__doc__)
+ @doc(Index.astype)
def astype(self, dtype, copy=True):
dtype = pandas_dtype(dtype)
if is_timedelta64_dtype(dtype) and not is_timedelta64_ns_dtype(dtype):
6 changes: 3 additions & 3 deletions pandas/core/resample.py
@@ -11,7 +11,7 @@
from pandas._libs.tslibs.period import IncompatibleFrequency
from pandas.compat.numpy import function as nv
from pandas.errors import AbstractMethodError
- from pandas.util._decorators import Appender, Substitution
+ from pandas.util._decorators import Appender, Substitution, doc

from pandas.core.dtypes.generic import ABCDataFrame, ABCSeries

@@ -858,7 +858,7 @@ def var(self, ddof=1, *args, **kwargs):
nv.validate_resampler_func("var", args, kwargs)
return self._downsample("var", ddof=ddof)

- @Appender(GroupBy.size.__doc__)
+ @doc(GroupBy.size)
def size(self):
result = self._downsample("size")
if not len(self.ax):
@@ -871,7 +871,7 @@ def size(self):
result = Series([], index=result.index, dtype="int64", name=name)
return result

- @Appender(GroupBy.count.__doc__)
+ @doc(GroupBy.count)
def count(self):
result = self._downsample("count")
if not len(self.ax):