Skip to content

Commit

Permalink
CLN remove unnecessary trailing commas to get ready for new version of black: generic -> blocks (pandas-dev#35950)
Browse files Browse the repository at this point in the history

* pandas/core/groupby/generic.py

* pandas/core/groupby/groupby.py

* pandas/core/groupby/ops.py

* pandas/core/indexes/datetimelike.py

* pandas/core/indexes/interval.py

* pandas/core/indexes/numeric.py

* pandas/core/indexes/range.py

* pandas/core/internals/blocks.py
  • Loading branch information
MarcoGorelli authored and Kevin D Smith committed Nov 2, 2020
1 parent 631f0b2 commit fe98274
Show file tree
Hide file tree
Showing 8 changed files with 29 additions and 43 deletions.
8 changes: 2 additions & 6 deletions pandas/core/groupby/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,9 +221,7 @@ def _selection_name(self):
def apply(self, func, *args, **kwargs):
return super().apply(func, *args, **kwargs)

@doc(
_agg_template, examples=_agg_examples_doc, klass="Series",
)
@doc(_agg_template, examples=_agg_examples_doc, klass="Series")
def aggregate(self, func=None, *args, engine=None, engine_kwargs=None, **kwargs):

if maybe_use_numba(engine):
Expand Down Expand Up @@ -935,9 +933,7 @@ class DataFrameGroupBy(GroupBy[DataFrame]):
See :ref:`groupby.aggregate.named` for more."""
)

@doc(
_agg_template, examples=_agg_examples_doc, klass="DataFrame",
)
@doc(_agg_template, examples=_agg_examples_doc, klass="DataFrame")
def aggregate(self, func=None, *args, engine=None, engine_kwargs=None, **kwargs):

if maybe_use_numba(engine):
Expand Down
8 changes: 3 additions & 5 deletions pandas/core/groupby/groupby.py
Original file line number Diff line number Diff line change
Expand Up @@ -1077,7 +1077,7 @@ def _aggregate_with_numba(self, data, func, *args, engine_kwargs=None, **kwargs)
tuple(args), kwargs, func, engine_kwargs
)
result = numba_agg_func(
sorted_data, sorted_index, starts, ends, len(group_keys), len(data.columns),
sorted_data, sorted_index, starts, ends, len(group_keys), len(data.columns)
)
if cache_key not in NUMBA_FUNC_CACHE:
NUMBA_FUNC_CACHE[cache_key] = numba_agg_func
Expand Down Expand Up @@ -1595,8 +1595,7 @@ def max(self, numeric_only: bool = False, min_count: int = -1):
def first(self, numeric_only: bool = False, min_count: int = -1):
def first_compat(obj: FrameOrSeries, axis: int = 0):
def first(x: Series):
"""Helper function for first item that isn't NA.
"""
"""Helper function for first item that isn't NA."""
x = x.array[notna(x.array)]
if len(x) == 0:
return np.nan
Expand All @@ -1620,8 +1619,7 @@ def first(x: Series):
def last(self, numeric_only: bool = False, min_count: int = -1):
def last_compat(obj: FrameOrSeries, axis: int = 0):
def last(x: Series):
"""Helper function for last item that isn't NA.
"""
"""Helper function for last item that isn't NA."""
x = x.array[notna(x.array)]
if len(x) == 0:
return np.nan
Expand Down
14 changes: 4 additions & 10 deletions pandas/core/groupby/ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -583,7 +583,7 @@ def transform(self, values, how: str, axis: int = 0, **kwargs):
return self._cython_operation("transform", values, how, axis, **kwargs)

def _aggregate(
self, result, counts, values, comp_ids, agg_func, min_count: int = -1,
self, result, counts, values, comp_ids, agg_func, min_count: int = -1
):
if agg_func is libgroupby.group_nth:
# different signature from the others
Expand All @@ -603,9 +603,7 @@ def _transform(

return result

def agg_series(
self, obj: Series, func: F, *args, **kwargs,
):
def agg_series(self, obj: Series, func: F, *args, **kwargs):
# Caller is responsible for checking ngroups != 0
assert self.ngroups != 0

Expand Down Expand Up @@ -653,9 +651,7 @@ def _aggregate_series_fast(self, obj: Series, func: F):
result, counts = grouper.get_result()
return result, counts

def _aggregate_series_pure_python(
self, obj: Series, func: F, *args, **kwargs,
):
def _aggregate_series_pure_python(self, obj: Series, func: F, *args, **kwargs):
group_index, _, ngroups = self.group_info

counts = np.zeros(ngroups, dtype=int)
Expand Down Expand Up @@ -841,9 +837,7 @@ def groupings(self) -> "List[grouper.Grouping]":
for lvl, name in zip(self.levels, self.names)
]

def agg_series(
self, obj: Series, func: F, *args, **kwargs,
):
def agg_series(self, obj: Series, func: F, *args, **kwargs):
# Caller is responsible for checking ngroups != 0
assert self.ngroups != 0
assert len(self.bins) > 0 # otherwise we'd get IndexError in get_result
Expand Down
4 changes: 1 addition & 3 deletions pandas/core/indexes/datetimelike.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,7 @@ def wrapper(left, right):
DatetimeLikeArrayMixin,
cache=True,
)
@inherit_names(
["mean", "asi8", "freq", "freqstr", "_box_func"], DatetimeLikeArrayMixin,
)
@inherit_names(["mean", "asi8", "freq", "freqstr", "_box_func"], DatetimeLikeArrayMixin)
class DatetimeIndexOpsMixin(ExtensionIndex):
"""
Common ops mixin to support a unified interface datetimelike Index.
Expand Down
4 changes: 2 additions & 2 deletions pandas/core/indexes/interval.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,10 +182,10 @@ def func(intvidx_self, other, sort=False):
)
@inherit_names(["set_closed", "to_tuples"], IntervalArray, wrap=True)
@inherit_names(
["__array__", "overlaps", "contains", "left", "right", "length"], IntervalArray,
["__array__", "overlaps", "contains", "left", "right", "length"], IntervalArray
)
@inherit_names(
["is_non_overlapping_monotonic", "mid", "closed"], IntervalArray, cache=True,
["is_non_overlapping_monotonic", "mid", "closed"], IntervalArray, cache=True
)
class IntervalIndex(IntervalMixin, ExtensionIndex):
_typ = "intervalindex"
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/indexes/numeric.py
Original file line number Diff line number Diff line change
Expand Up @@ -436,7 +436,7 @@ def isin(self, values, level=None):
def _is_compatible_with_other(self, other) -> bool:
return super()._is_compatible_with_other(other) or all(
isinstance(
obj, (ABCInt64Index, ABCFloat64Index, ABCUInt64Index, ABCRangeIndex),
obj, (ABCInt64Index, ABCFloat64Index, ABCUInt64Index, ABCRangeIndex)
)
for obj in [self, other]
)
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/indexes/range.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ class RangeIndex(Int64Index):
# Constructors

def __new__(
cls, start=None, stop=None, step=None, dtype=None, copy=False, name=None,
cls, start=None, stop=None, step=None, dtype=None, copy=False, name=None
):

cls._validate_dtype(dtype)
Expand Down
30 changes: 15 additions & 15 deletions pandas/core/internals/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -724,7 +724,7 @@ def replace(
# _can_hold_element checks have reduced this back to the
# scalar case and we can avoid a costly object cast
return self.replace(
to_replace[0], value, inplace=inplace, regex=regex, convert=convert,
to_replace[0], value, inplace=inplace, regex=regex, convert=convert
)

# GH 22083, TypeError or ValueError occurred within error handling
Expand Down Expand Up @@ -905,7 +905,7 @@ def setitem(self, indexer, value):
return block

def putmask(
self, mask, new, inplace: bool = False, axis: int = 0, transpose: bool = False,
self, mask, new, inplace: bool = False, axis: int = 0, transpose: bool = False
) -> List["Block"]:
"""
putmask the data to the block; it is possible that we may create a
Expand Down Expand Up @@ -1292,7 +1292,7 @@ def shift(self, periods: int, axis: int = 0, fill_value=None):
return [self.make_block(new_values)]

def where(
self, other, cond, errors="raise", try_cast: bool = False, axis: int = 0,
self, other, cond, errors="raise", try_cast: bool = False, axis: int = 0
) -> List["Block"]:
"""
evaluate the block; return result block(s) from the result
Expand Down Expand Up @@ -1366,7 +1366,7 @@ def where_func(cond, values, other):
# we are explicitly ignoring errors
block = self.coerce_to_target_dtype(other)
blocks = block.where(
orig_other, cond, errors=errors, try_cast=try_cast, axis=axis,
orig_other, cond, errors=errors, try_cast=try_cast, axis=axis
)
return self._maybe_downcast(blocks, "infer")

Expand Down Expand Up @@ -1605,7 +1605,7 @@ def set(self, locs, values):
self.values = values

def putmask(
self, mask, new, inplace: bool = False, axis: int = 0, transpose: bool = False,
self, mask, new, inplace: bool = False, axis: int = 0, transpose: bool = False
) -> List["Block"]:
"""
See Block.putmask.__doc__
Expand Down Expand Up @@ -1816,7 +1816,7 @@ def diff(self, n: int, axis: int = 1) -> List["Block"]:
return super().diff(n, axis)

def shift(
self, periods: int, axis: int = 0, fill_value: Any = None,
self, periods: int, axis: int = 0, fill_value: Any = None
) -> List["ExtensionBlock"]:
"""
Shift the block by `periods`.
Expand All @@ -1833,7 +1833,7 @@ def shift(
]

def where(
self, other, cond, errors="raise", try_cast: bool = False, axis: int = 0,
self, other, cond, errors="raise", try_cast: bool = False, axis: int = 0
) -> List["Block"]:

cond = _extract_bool_array(cond)
Expand Down Expand Up @@ -1945,7 +1945,7 @@ def _can_hold_element(self, element: Any) -> bool:
)

def to_native_types(
self, na_rep="", float_format=None, decimal=".", quoting=None, **kwargs,
self, na_rep="", float_format=None, decimal=".", quoting=None, **kwargs
):
""" convert to our native types format """
values = self.values
Expand Down Expand Up @@ -2369,7 +2369,7 @@ def replace(self, to_replace, value, inplace=False, regex=False, convert=True):
if not np.can_cast(to_replace_values, bool):
return self
return super().replace(
to_replace, value, inplace=inplace, regex=regex, convert=convert,
to_replace, value, inplace=inplace, regex=regex, convert=convert
)


Expand Down Expand Up @@ -2453,18 +2453,18 @@ def replace(self, to_replace, value, inplace=False, regex=False, convert=True):

if not either_list and is_re(to_replace):
return self._replace_single(
to_replace, value, inplace=inplace, regex=True, convert=convert,
to_replace, value, inplace=inplace, regex=True, convert=convert
)
elif not (either_list or regex):
return super().replace(
to_replace, value, inplace=inplace, regex=regex, convert=convert,
to_replace, value, inplace=inplace, regex=regex, convert=convert
)
elif both_lists:
for to_rep, v in zip(to_replace, value):
result_blocks = []
for b in blocks:
result = b._replace_single(
to_rep, v, inplace=inplace, regex=regex, convert=convert,
to_rep, v, inplace=inplace, regex=regex, convert=convert
)
result_blocks = _extend_blocks(result, result_blocks)
blocks = result_blocks
Expand All @@ -2475,18 +2475,18 @@ def replace(self, to_replace, value, inplace=False, regex=False, convert=True):
result_blocks = []
for b in blocks:
result = b._replace_single(
to_rep, value, inplace=inplace, regex=regex, convert=convert,
to_rep, value, inplace=inplace, regex=regex, convert=convert
)
result_blocks = _extend_blocks(result, result_blocks)
blocks = result_blocks
return result_blocks

return self._replace_single(
to_replace, value, inplace=inplace, convert=convert, regex=regex,
to_replace, value, inplace=inplace, convert=convert, regex=regex
)

def _replace_single(
self, to_replace, value, inplace=False, regex=False, convert=True, mask=None,
self, to_replace, value, inplace=False, regex=False, convert=True, mask=None
):
"""
Replace elements by the given value.
Expand Down

0 comments on commit fe98274

Please sign in to comment.