diff --git a/pandas/_testing.py b/pandas/_testing.py
index ef6232fa6d575..b402b040d9268 100644
--- a/pandas/_testing.py
+++ b/pandas/_testing.py
@@ -939,7 +939,7 @@ def assert_categorical_equal(
     if check_category_order:
         assert_index_equal(left.categories, right.categories, obj=f"{obj}.categories")
         assert_numpy_array_equal(
-            left.codes, right.codes, check_dtype=check_dtype, obj=f"{obj}.codes",
+            left.codes, right.codes, check_dtype=check_dtype, obj=f"{obj}.codes"
         )
     else:
         try:
@@ -948,9 +948,7 @@ def assert_categorical_equal(
         except TypeError:
             # e.g. '<' not supported between instances of 'int' and 'str'
             lc, rc = left.categories, right.categories
-        assert_index_equal(
-            lc, rc, obj=f"{obj}.categories",
-        )
+        assert_index_equal(lc, rc, obj=f"{obj}.categories")
         assert_index_equal(
             left.categories.take(left.codes),
             right.categories.take(right.codes),
@@ -1092,7 +1090,7 @@ def _raise(left, right, err_msg):
         if err_msg is None:
             if left.shape != right.shape:
                 raise_assert_detail(
-                    obj, f"{obj} shapes are different", left.shape, right.shape,
+                    obj, f"{obj} shapes are different", left.shape, right.shape
                 )

             diff = 0
@@ -1559,7 +1557,7 @@ def assert_frame_equal(
     # shape comparison
     if left.shape != right.shape:
         raise_assert_detail(
-            obj, f"{obj} shape mismatch", f"{repr(left.shape)}", f"{repr(right.shape)}",
+            obj, f"{obj} shape mismatch", f"{repr(left.shape)}", f"{repr(right.shape)}"
         )

     if check_like:
@@ -2884,7 +2882,7 @@ def convert_rows_list_to_csv_str(rows_list: List[str]):
     return expected


-def external_error_raised(expected_exception: Type[Exception],) -> ContextManager:
+def external_error_raised(expected_exception: Type[Exception]) -> ContextManager:
     """
     Helper function to mark pytest.raises that have an external error message.

diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py
index befde7c355818..2a6e983eff3ee 100644
--- a/pandas/core/algorithms.py
+++ b/pandas/core/algorithms.py
@@ -462,7 +462,7 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray:


 def _factorize_array(
-    values, na_sentinel: int = -1, size_hint=None, na_value=None, mask=None,
+    values, na_sentinel: int = -1, size_hint=None, na_value=None, mask=None
 ) -> Tuple[np.ndarray, np.ndarray]:
     """
     Factorize an array-like to codes and uniques.
diff --git a/pandas/core/arrays/_mixins.py b/pandas/core/arrays/_mixins.py
index 832d09b062265..2976747d66dfa 100644
--- a/pandas/core/arrays/_mixins.py
+++ b/pandas/core/arrays/_mixins.py
@@ -40,7 +40,7 @@ def take(
             fill_value = self._validate_fill_value(fill_value)

         new_data = take(
-            self._ndarray, indices, allow_fill=allow_fill, fill_value=fill_value,
+            self._ndarray, indices, allow_fill=allow_fill, fill_value=fill_value
         )
         return self._from_backing_data(new_data)

diff --git a/pandas/core/arrays/categorical.py b/pandas/core/arrays/categorical.py
index a28b341669918..27b1afdb438cb 100644
--- a/pandas/core/arrays/categorical.py
+++ b/pandas/core/arrays/categorical.py
@@ -1505,7 +1505,7 @@ def argsort(self, ascending=True, kind="quicksort", **kwargs):
         return super().argsort(ascending=ascending, kind=kind, **kwargs)

     def sort_values(
-        self, inplace: bool = False, ascending: bool = True, na_position: str = "last",
+        self, inplace: bool = False, ascending: bool = True, na_position: str = "last"
     ):
         """
         Sort the Categorical by category value returning a new
diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py
index 57df067c7b16e..d83ff91a1315f 100644
--- a/pandas/core/arrays/integer.py
+++ b/pandas/core/arrays/integer.py
@@ -138,7 +138,7 @@ def __from_arrow__(
         return IntegerArray._concat_same_type(results)


-def integer_array(values, dtype=None, copy: bool = False,) -> "IntegerArray":
+def integer_array(values, dtype=None, copy: bool = False) -> "IntegerArray":
     """
     Infer and return an integer array of the values.

@@ -182,7 +182,7 @@ def safe_cast(values, dtype, copy: bool):


 def coerce_to_array(
-    values, dtype, mask=None, copy: bool = False,
+    values, dtype, mask=None, copy: bool = False
 ) -> Tuple[np.ndarray, np.ndarray]:
     """
     Coerce the input values array to numpy arrays with a mask
diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py
index 235840d6d201e..1237dea5c1a64 100644
--- a/pandas/core/arrays/masked.py
+++ b/pandas/core/arrays/masked.py
@@ -126,7 +126,7 @@ def __invert__(self: BaseMaskedArrayT) -> BaseMaskedArrayT:
         return type(self)(~self._data, self._mask)

     def to_numpy(
-        self, dtype=None, copy: bool = False, na_value: Scalar = lib.no_default,
+        self, dtype=None, copy: bool = False, na_value: Scalar = lib.no_default
     ) -> np.ndarray:
         """
         Convert to a NumPy Array.
diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py
index 05f901518d82f..23a4a70734c81 100644
--- a/pandas/core/arrays/numpy_.py
+++ b/pandas/core/arrays/numpy_.py
@@ -280,7 +280,7 @@ def isna(self) -> np.ndarray:
         return isna(self._ndarray)

     def fillna(
-        self, value=None, method: Optional[str] = None, limit: Optional[int] = None,
+        self, value=None, method: Optional[str] = None, limit: Optional[int] = None
     ) -> "PandasArray":
         # TODO(_values_for_fillna): remove this
         value, method = validate_fillna_kwargs(value, method)
diff --git a/pandas/core/arrays/period.py b/pandas/core/arrays/period.py
index ddaf6d39f1837..cc39ffb5d1203 100644
--- a/pandas/core/arrays/period.py
+++ b/pandas/core/arrays/period.py
@@ -634,7 +634,7 @@ def _sub_period_array(self, other):
         return new_values

     def _addsub_int_array(
-        self, other: np.ndarray, op: Callable[[Any, Any], Any],
+        self, other: np.ndarray, op: Callable[[Any, Any], Any]
     ) -> "PeriodArray":
         """
         Add or subtract array of integers; equivalent to applying
diff --git a/pandas/core/construction.py b/pandas/core/construction.py
index e8c9f28e50084..f145e76046bee 100644
--- a/pandas/core/construction.py
+++ b/pandas/core/construction.py
@@ -514,9 +514,7 @@ def sanitize_array(
     return subarr


-def _try_cast(
-    arr, dtype: Optional[DtypeObj], copy: bool, raise_cast_failure: bool,
-):
+def _try_cast(arr, dtype: Optional[DtypeObj], copy: bool, raise_cast_failure: bool):
     """
     Convert input to numpy ndarray and optionally cast to a given dtype.

diff --git a/pandas/core/generic.py b/pandas/core/generic.py
index 286da6e1de9d5..fea3efedb6abb 100644
--- a/pandas/core/generic.py
+++ b/pandas/core/generic.py
@@ -315,17 +315,13 @@ def _data(self):
     @property
     def _AXIS_NUMBERS(self) -> Dict[str, int]:
         """.. deprecated:: 1.1.0"""
-        warnings.warn(
-            "_AXIS_NUMBERS has been deprecated.", FutureWarning, stacklevel=3,
-        )
+        warnings.warn("_AXIS_NUMBERS has been deprecated.", FutureWarning, stacklevel=3)
         return {"index": 0}

     @property
     def _AXIS_NAMES(self) -> Dict[int, str]:
         """.. deprecated:: 1.1.0"""
-        warnings.warn(
-            "_AXIS_NAMES has been deprecated.", FutureWarning, stacklevel=3,
-        )
+        warnings.warn("_AXIS_NAMES has been deprecated.", FutureWarning, stacklevel=3)
         return {0: "index"}

     def _construct_axes_dict(self, axes=None, **kwargs):
@@ -5128,7 +5124,7 @@ def pipe(self, func, *args, **kwargs):
         ...    .pipe(g, arg1=a)
         ...    .pipe((func, 'arg2'), arg1=a, arg3=c)
         ...  )  # doctest: +SKIP
-        """
+        """
         return com.pipe(self, func, *args, **kwargs)

     _shared_docs["aggregate"] = dedent(
@@ -5630,7 +5626,7 @@ def astype(

         else:
             # else, only a single dtype is given
-            new_data = self._mgr.astype(dtype=dtype, copy=copy, errors=errors,)
+            new_data = self._mgr.astype(dtype=dtype, copy=copy, errors=errors)
             return self._constructor(new_data).__finalize__(self, method="astype")

         # GH 33113: handle empty frame or series
@@ -6520,7 +6516,7 @@ def replace(
         3    b
         4    b
         dtype: object
-        """
+        """
         if not (
             is_scalar(to_replace)
             or is_re_compilable(to_replace)
@@ -7772,7 +7768,7 @@ def between_time(
             raise TypeError("Index must be DatetimeIndex")

         indexer = index.indexer_between_time(
-            start_time, end_time, include_start=include_start, include_end=include_end,
+            start_time, end_time, include_start=include_start, include_end=include_end
         )
         return self._take_with_is_copy(indexer, axis=axis)

@@ -8939,7 +8935,7 @@ def _where(
             self._check_inplace_setting(other)

         new_data = self._mgr.putmask(
-            mask=cond, new=other, align=align, axis=block_axis,
+            mask=cond, new=other, align=align, axis=block_axis
         )
         result = self._constructor(new_data)
         return self._update_inplace(result)