diff --git a/pandas-stubs/_libs/lib.pyi b/pandas-stubs/_libs/lib.pyi
index 06f31f0db..7622a13a5 100644
--- a/pandas-stubs/_libs/lib.pyi
+++ b/pandas-stubs/_libs/lib.pyi
@@ -5,7 +5,6 @@ from typing import (
 )

 import numpy as np
-from pandas import Interval
 from typing_extensions import (
     TypeAlias,
     TypeGuard,
@@ -21,7 +20,6 @@ def infer_dtype(value: object, skipna: bool = ...) -> str: ...
 def is_iterator(obj: object) -> bool: ...
 def is_scalar(val: object) -> bool: ...
 def is_list_like(obj: object, allow_sets: bool = ...) -> bool: ...
-def is_interval(val: object) -> TypeGuard[Interval]: ...
 def is_complex(val: object) -> TypeGuard[complex]: ...
 def is_bool(val: object) -> TypeGuard[bool | np.bool_]: ...
 def is_integer(val: object) -> TypeGuard[int | np.integer]: ...
diff --git a/pandas-stubs/api/types/__init__.pyi b/pandas-stubs/api/types/__init__.pyi
index 8f5f9cbf1..23e267f91 100644
--- a/pandas-stubs/api/types/__init__.pyi
+++ b/pandas-stubs/api/types/__init__.pyi
@@ -19,7 +19,6 @@ from pandas.core.dtypes.api import (
     is_hashable as is_hashable,
     is_integer as is_integer,
     is_integer_dtype as is_integer_dtype,
-    is_interval as is_interval,
     is_iterator as is_iterator,
     is_list_like as is_list_like,
     is_named_tuple as is_named_tuple,
diff --git a/pandas-stubs/core/dtypes/api.pyi b/pandas-stubs/core/dtypes/api.pyi
index 419b16466..629e5409f 100644
--- a/pandas-stubs/core/dtypes/api.pyi
+++ b/pandas-stubs/core/dtypes/api.pyi
@@ -17,7 +17,6 @@ from pandas.core.dtypes.common import (
     is_hashable as is_hashable,
     is_integer as is_integer,
     is_integer_dtype as is_integer_dtype,
-    is_interval as is_interval,
     is_iterator as is_iterator,
     is_list_like as is_list_like,
     is_named_tuple as is_named_tuple,
diff --git a/pandas-stubs/core/dtypes/common.pyi b/pandas-stubs/core/dtypes/common.pyi
index 75b5cdd2a..c4a7bfc32 100644
--- a/pandas-stubs/core/dtypes/common.pyi
+++ b/pandas-stubs/core/dtypes/common.pyi
@@ -17,7 +17,6 @@ from pandas.core.dtypes.inference import (
     is_float as is_float,
     is_hashable as is_hashable,
     is_integer as is_integer,
-    is_interval as is_interval,
     is_iterator as is_iterator,
     is_list_like as is_list_like,
     is_named_tuple as is_named_tuple,
diff --git a/pandas-stubs/core/dtypes/inference.pyi b/pandas-stubs/core/dtypes/inference.pyi
index 6346b3e27..649a473f7 100644
--- a/pandas-stubs/core/dtypes/inference.pyi
+++ b/pandas-stubs/core/dtypes/inference.pyi
@@ -5,7 +5,6 @@
 is_integer = lib.is_integer
 is_float = lib.is_float
 is_complex = lib.is_complex
 is_scalar = lib.is_scalar
-is_interval = lib.is_interval
 is_list_like = lib.is_list_like
 is_iterator = lib.is_iterator
diff --git a/pandas-stubs/core/frame.pyi b/pandas-stubs/core/frame.pyi
index 986b43b0b..3e687364f 100644
--- a/pandas-stubs/core/frame.pyi
+++ b/pandas-stubs/core/frame.pyi
@@ -710,7 +710,6 @@ class DataFrame(NDFrame, OpsMixin):
         self,
         value: Scalar | NAType | dict | Series | DataFrame | None = ...,
         *,
-        method: FillnaOptions | None = ...,
         axis: Axis | None = ...,
         limit: int = ...,
         downcast: dict | None = ...,
@@ -721,7 +720,6 @@
         self,
         value: Scalar | NAType | dict | Series | DataFrame | None = ...,
         *,
-        method: FillnaOptions | None = ...,
         axis: Axis | None = ...,
         limit: int = ...,
         downcast: dict | None = ...,
@@ -732,7 +730,6 @@
         self,
         value: Scalar | NAType | dict | Series | DataFrame | None = ...,
         *,
-        method: FillnaOptions | None = ...,
         axis: Axis | None = ...,
         inplace: _bool | None = ...,
         limit: int = ...,
         downcast: dict | None = ...,
diff --git a/pandas-stubs/core/generic.pyi b/pandas-stubs/core/generic.pyi
index e32ee8a5e..431b9dfef 100644
--- a/pandas-stubs/core/generic.pyi
+++ b/pandas-stubs/core/generic.pyi
@@ -123,6 +123,7 @@ class NDFrame(indexing.IndexingMixin):
     def to_hdf(
         self,
         path_or_buf: FilePath | HDFStore,
+        *,
         key: _str,
         mode: Literal["a", "w", "r+"] = ...,
         complevel: int | None = ...,
@@ -403,7 +404,6 @@ class NDFrame(indexing.IndexingMixin):
         self,
         value=...,
         *,
-        method=...,
         axis=...,
         inplace: _bool = ...,
         limit=...,
diff --git a/pandas-stubs/core/groupby/generic.pyi b/pandas-stubs/core/groupby/generic.pyi
index e14c104eb..715899fca 100644
--- a/pandas-stubs/core/groupby/generic.pyi
+++ b/pandas-stubs/core/groupby/generic.pyi
@@ -316,17 +316,6 @@ class DataFrameGroupBy(GroupBy[DataFrame], Generic[ByT]):
         ascending: bool = ...,
         dropna: bool = ...,
     ) -> Series[float]: ...
-    def fillna(
-        self,
-        value: (
-            Scalar | ArrayLike | Series | DataFrame | Mapping[Hashable, Scalar] | None
-        ) = ...,
-        method: Literal["bfill", "ffill"] | None = ...,
-        axis: Axis | None | NoDefault = ...,
-        inplace: Literal[False] = ...,
-        limit: int | None = ...,
-        downcast: dict | None | NoDefault = ...,
-    ) -> DataFrame: ...
     def take(
         self, indices: TakeIndexer, axis: Axis | None | NoDefault = ..., **kwargs
     ) -> DataFrame: ...
diff --git a/pandas-stubs/core/indexes/datetimes.pyi b/pandas-stubs/core/indexes/datetimes.pyi
index 57ae07935..50c2778ad 100644
--- a/pandas-stubs/core/indexes/datetimes.pyi
+++ b/pandas-stubs/core/indexes/datetimes.pyi
@@ -44,8 +44,6 @@ class DatetimeIndex(DatetimeTimedeltaMixin[Timestamp], DatetimeIndexProperties):
         data: ArrayLike | AnyArrayLike | list | tuple,
         freq=...,
         tz=...,
-        normalize: bool = ...,
-        closed=...,
         ambiguous: str = ...,
         dayfirst: bool = ...,
         yearfirst: bool = ...,
diff --git a/pandas-stubs/core/series.pyi b/pandas-stubs/core/series.pyi
index 1cc4c59d0..383c8df99 100644
--- a/pandas-stubs/core/series.pyi
+++ b/pandas-stubs/core/series.pyi
@@ -1041,7 +1041,6 @@ class Series(IndexOpsMixin[S1], NDFrame):
         self,
         value: Scalar | NAType | dict | Series[S1] | DataFrame | None = ...,
         *,
-        method: FillnaOptions | None = ...,
         axis: AxisIndex = ...,
         limit: int | None = ...,
         downcast: dict | None = ...,
@@ -1052,7 +1051,6 @@
         self,
         value: Scalar | NAType | dict | Series[S1] | DataFrame | None = ...,
         *,
-        method: FillnaOptions | None = ...,
         axis: AxisIndex = ...,
         limit: int | None = ...,
         downcast: dict | None = ...,
@@ -1063,7 +1061,6 @@
         self,
         value: Scalar | NAType | dict | Series[S1] | DataFrame | None = ...,
         *,
-        method: FillnaOptions | None = ...,
         axis: AxisIndex = ...,
         inplace: _bool = ...,
         limit: int | None = ...,
diff --git a/pandas-stubs/core/tools/numeric.pyi b/pandas-stubs/core/tools/numeric.pyi
index 7b998b517..5bfde0413 100644
--- a/pandas-stubs/core/tools/numeric.pyi
+++ b/pandas-stubs/core/tools/numeric.pyi
@@ -25,13 +25,6 @@ def to_numeric(
     dtype_backend: DtypeBackend | NoDefault = ...,
 ) -> float: ...
 @overload
-def to_numeric(
-    arg: Scalar,
-    errors: Literal["ignore"],
-    downcast: _Downcast = ...,
-    dtype_backend: DtypeBackend | NoDefault = ...,
-) -> Scalar: ...
-@overload
 def to_numeric(
     arg: list | tuple | np.ndarray,
     errors: IgnoreRaiseCoerce = ...,
diff --git a/pyproject.toml b/pyproject.toml
index d10abb29c..181609442 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -36,10 +36,10 @@ numpy = { version = ">=1.26.0", python = "<3.13" }

 [tool.poetry.group.dev.dependencies]
 mypy = "1.8.0"
-pandas = "2.1.4"
+pandas = "2.2.0"
 pyarrow = ">=10.0.1"
 pytest = ">=7.1.2"
-pyright = "==1.1.346"
+pyright = ">=1.1.350"
 poethepoet = ">=0.16.5"
 loguru = ">=0.6.0"
 typing-extensions = ">=4.4.0"
diff --git a/tests/__init__.py b/tests/__init__.py
index 166b5a3e5..520f02b11 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -37,7 +37,9 @@ def check(actual: T, klass: type, dtype: type | None = None, attr: str = "left")
     elif isinstance(actual, BaseGroupBy):
         value = actual.obj
     elif hasattr(actual, "__iter__"):
-        value = next(iter(actual))  # pyright: ignore[reportGeneralTypeIssues]
+        value = next(
+            iter(actual)  # pyright: ignore[reportArgumentType,reportCallIssue]
+        )
     else:
         assert hasattr(actual, attr)
         value = getattr(actual, attr)
diff --git a/tests/extension/decimal/array.py b/tests/extension/decimal/array.py
index f29e73e58..0bf86335d 100644
--- a/tests/extension/decimal/array.py
+++ b/tests/extension/decimal/array.py
@@ -142,7 +142,7 @@ def __array_ufunc__(self, ufunc: np.ufunc, method: str, *inputs, **kwargs):
             return result

         if "out" in kwargs:
-            return arraylike.dispatch_ufunc_with_out(  # type: ignore[attr-defined] # pyright: ignore[reportGeneralTypeIssues]
+            return arraylike.dispatch_ufunc_with_out(  # type: ignore[attr-defined] # pyright: ignore[reportAttributeAccessIssue]
                 self, ufunc, method, *inputs, **kwargs
             )

@@ -150,7 +150,7 @@ def __array_ufunc__(self, ufunc: np.ufunc, method: str, *inputs, **kwargs):
         result = getattr(ufunc, method)(*inputs, **kwargs)

         if method == "reduce":
-            result = arraylike.dispatch_reduction_ufunc(  # type: ignore[attr-defined] # pyright: ignore[reportGeneralTypeIssues]
+            result = arraylike.dispatch_reduction_ufunc(  # type: ignore[attr-defined] # pyright: ignore[reportAttributeAccessIssue]
                 self, ufunc, method, *inputs, **kwargs
             )
             if result is not NotImplemented:
diff --git a/tests/test_api_types.py b/tests/test_api_types.py
index 0ebb37cce..a17a937de 100644
--- a/tests/test_api_types.py
+++ b/tests/test_api_types.py
@@ -6,10 +6,7 @@ from pandas._typing import DtypeObj

-from tests import (
-    check,
-    pytest_warns_bounded,
-)
+from tests import check

 nparr = np.array([1, 2, 3])
 arr = pd.Series([1, 2, 3])
@@ -209,21 +206,6 @@ def test_is_integer_dtype() -> None:
     # check(assert_type(api.is_integer_dtype(ExtensionDtype), bool), bool) pandas GH 50923


-def test_is_interval() -> None:
-    with pytest_warns_bounded(
-        FutureWarning, "is_interval is deprecated", lower="2.1.99"
-    ):
-        check(assert_type(api.is_interval(obj), bool), bool)
-        check(assert_type(api.is_interval(nparr), bool), bool)
-        check(assert_type(api.is_interval(dtylike), bool), bool)
-        check(assert_type(api.is_interval(arr), bool), bool)
-        check(
-            assert_type(api.is_interval(dframe), bool),
-            bool,
-        )
-        check(assert_type(api.is_interval(ind), bool), bool)
-
-
 def test_is_iterator() -> None:
     check(assert_type(api.is_iterator(obj), bool), bool)
     check(assert_type(api.is_iterator(nparr), bool), bool)
diff --git a/tests/test_dtypes.py b/tests/test_dtypes.py
index c9b2c4a5b..3736fd09f 100644
--- a/tests/test_dtypes.py
+++ b/tests/test_dtypes.py
@@ -59,10 +59,10 @@ def test_period_dtype() -> None:
     check(assert_type(pd.PeriodDtype(freq=Day()), pd.PeriodDtype), pd.PeriodDtype)
if TYPE_CHECKING_INVALID_USAGE: pd.PeriodDtype( - freq=CustomBusinessDay() # type:ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + freq=CustomBusinessDay() # type:ignore[arg-type] # pyright: ignore[reportArgumentType] ) pd.PeriodDtype( - freq=BusinessDay() # type:ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + freq=BusinessDay() # type:ignore[arg-type] # pyright: ignore[reportArgumentType] ) check( assert_type(p_dt.freq, pd.tseries.offsets.BaseOffset), diff --git a/tests/test_frame.py b/tests/test_frame.py index f26623a26..8940179c4 100644 --- a/tests/test_frame.py +++ b/tests/test_frame.py @@ -126,22 +126,22 @@ def test_types_append() -> None: df = pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}) df2 = pd.DataFrame({"col1": [10, 20], "col2": [30, 40]}) if TYPE_CHECKING_INVALID_USAGE: - res1: pd.DataFrame = df.append(df2) # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - res2: pd.DataFrame = df.append([1, 2, 3]) # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - res3: pd.DataFrame = df.append([[1, 2, 3]]) # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - res4: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + res1: pd.DataFrame = df.append(df2) # type: ignore[operator] # pyright: ignore[reportCallIssue] + res2: pd.DataFrame = df.append([1, 2, 3]) # type: ignore[operator] # pyright: ignore[reportCallIssue] + res3: pd.DataFrame = df.append([[1, 2, 3]]) # type: ignore[operator] # pyright: ignore[reportCallIssue] + res4: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportCallIssue] {("a", 1): [1, 2, 3], "b": df2}, ignore_index=True ) - res5: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + res5: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportCallIssue] {1: [1, 2, 3]}, ignore_index=True ) - res6: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + res6: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportCallIssue] {1: [1, 2, 3], "col2": [1, 2, 3]}, ignore_index=True ) - res7: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + res7: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportCallIssue] pd.Series([5, 6]), ignore_index=True ) - res8: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + res8: pd.DataFrame = df.append( # type: ignore[operator] # pyright: ignore[reportCallIssue] pd.Series([5, 6], index=["col1", "col2"]), ignore_index=True ) @@ -343,10 +343,7 @@ def test_types_dropna() -> None: def test_types_fillna() -> None: df = pd.DataFrame(data={"col1": [np.nan, np.nan], "col2": [3, np.nan]}) res: pd.DataFrame = df.fillna(0) - with pytest_warns_bounded( - FutureWarning, "DataFrame.fillna with 'method' is deprecated", lower="2.0.99" - ): - res2: None = df.fillna(method="pad", axis=1, inplace=True) + res2: None = df.fillna(0, axis=1, inplace=True) def test_types_sort_index() -> None: @@ -427,8 +424,8 @@ def test_types_mean() -> None: s2: pd.Series = df.mean(axis=0) df2: pd.DataFrame = df.groupby(level=0).mean() if TYPE_CHECKING_INVALID_USAGE: - df3: pd.DataFrame = df.groupby(axis=1, level=0).mean() # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] - df4: pd.DataFrame = df.groupby(axis=1, level=0, dropna=True).mean() # type: ignore[call-overload] # pyright: 
ignore[reportGeneralTypeIssues] + df3: pd.DataFrame = df.groupby(axis=1, level=0).mean() # type: ignore[call-overload] # pyright: ignore[reportArgumentType] + df4: pd.DataFrame = df.groupby(axis=1, level=0, dropna=True).mean() # type: ignore[call-overload] # pyright: ignore[reportArgumentType] s3: pd.Series = df.mean(axis=1, skipna=True, numeric_only=False) @@ -438,8 +435,8 @@ def test_types_median() -> None: s2: pd.Series = df.median(axis=0) df2: pd.DataFrame = df.groupby(level=0).median() if TYPE_CHECKING_INVALID_USAGE: - df3: pd.DataFrame = df.groupby(axis=1, level=0).median() # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] - df4: pd.DataFrame = df.groupby(axis=1, level=0, dropna=True).median() # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] + df3: pd.DataFrame = df.groupby(axis=1, level=0).median() # type: ignore[call-overload] # pyright: ignore[reportArgumentType] + df4: pd.DataFrame = df.groupby(axis=1, level=0, dropna=True).median() # type: ignore[call-overload] # pyright: ignore[reportArgumentType] s3: pd.Series = df.median(axis=1, skipna=True, numeric_only=False) @@ -957,9 +954,12 @@ def test_types_groupby() -> None: with pytest_warns_bounded( FutureWarning, "(The provided callable is currently using|The behavior of DataFrame.sum with)", - lower="2.0.99", ): - df7: pd.DataFrame = df.groupby(by="col1").apply(sum) + with pytest_warns_bounded( + DeprecationWarning, + "DataFrameGroupBy.apply operated on the grouping columns", + ): + df7: pd.DataFrame = df.groupby(by="col1").apply(sum) df8: pd.DataFrame = df.groupby("col1").transform("sum") s1: pd.Series = df.set_index("col1")["col2"] s2: pd.Series = s1.groupby("col1").transform("sum") @@ -1140,9 +1140,9 @@ def test_types_window() -> None: df = pd.DataFrame(data={"col1": [1, 1, 2], "col2": [3, 4, 5]}) df.expanding() if TYPE_CHECKING_INVALID_USAGE: - df.expanding(axis=1) # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] - df.rolling(2, axis=1, center=True) # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] - df.expanding(axis=1, center=True) # type: ignore[arg-type, call-arg] # pyright: ignore[reportGeneralTypeIssues] + df.expanding(axis=1) # type: ignore[arg-type] # pyright: ignore[reportArgumentType] + df.rolling(2, axis=1, center=True) # type: ignore[call-overload] # pyright: ignore[reportArgumentType] + df.expanding(axis=1, center=True) # type: ignore[arg-type, call-arg] # pyright: ignore[reportCallIssue] df.rolling(2) @@ -1480,51 +1480,51 @@ def qux( ) if TYPE_CHECKING_INVALID_USAGE: - df.pipe( + df.pipe( # pyright: ignore[reportCallIssue] qux, - "a", # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + "a", # type: ignore[arg-type] # pyright: ignore[reportArgumentType] [1.0, 2.0], argument_2="hi", keyword_only=(1, 2), ) - df.pipe( + df.pipe( # pyright: ignore[reportCallIssue] qux, 1, - [1.0, "b"], # type: ignore[list-item] # pyright: ignore[reportGeneralTypeIssues] + [1.0, "b"], # type: ignore[list-item] # pyright: ignore[reportArgumentType] argument_2="hi", keyword_only=(1, 2), ) - df.pipe( + df.pipe( # pyright: ignore[reportCallIssue] qux, 1, [1.0, 2.0], - argument_2=11, # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + argument_2=11, # type: ignore[arg-type] # pyright: ignore[reportArgumentType] keyword_only=(1, 2), ) - df.pipe( + df.pipe( # pyright: ignore[reportCallIssue] qux, 1, [1.0, 2.0], argument_2="hi", - keyword_only=(1,), # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + 
keyword_only=(1,), # type: ignore[arg-type] # pyright: ignore[reportArgumentType] ) - df.pipe( # type: ignore[call-arg] + df.pipe( # type: ignore[call-arg] # pyright: ignore[reportCallIssue] qux, 1, [1.0, 2.0], - argument_3="hi", # pyright: ignore[reportGeneralTypeIssues] + argument_3="hi", # pyright: ignore[reportCallIssue] keyword_only=(1, 2), ) - df.pipe( # type: ignore[misc] + df.pipe( # type: ignore[misc] # pyright: ignore[reportCallIssue] qux, 1, [1.0, 2.0], 11, # type: ignore[arg-type] - (1, 2), # pyright: ignore[reportGeneralTypeIssues] + (1, 2), # pyright: ignore[reportCallIssue] ) - df.pipe( # type: ignore[call-arg] + df.pipe( # type: ignore[call-arg] # pyright: ignore[reportCallIssue] qux, - positional_only=1, # pyright: ignore[reportGeneralTypeIssues] + positional_only=1, # pyright: ignore[reportCallIssue] argument_1=[1.0, 2.0], argument_2=11, # type: ignore[arg-type] keyword_only=(1, 2), @@ -1548,16 +1548,16 @@ def dataframe_not_first_arg(x: int, df: pd.DataFrame) -> pd.DataFrame: ) if TYPE_CHECKING_INVALID_USAGE: - df.pipe( + df.pipe( # pyright: ignore[reportCallIssue] ( dataframe_not_first_arg, # type: ignore[arg-type] - 1, # pyright: ignore[reportGeneralTypeIssues] + 1, # pyright: ignore[reportArgumentType] ), 1, ) - df.pipe( - ( # pyright: ignore[reportGeneralTypeIssues] - 1, # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + df.pipe( # pyright: ignore[reportCallIssue] + ( + 1, # type: ignore[arg-type] # pyright: ignore[reportArgumentType] "df", ), 1, @@ -1709,19 +1709,12 @@ def test_types_regressions() -> None: # https://github.com/microsoft/python-type-stubs/issues/115 df = pd.DataFrame({"A": [1, 2, 3], "B": [5, 6, 7]}) - with pytest_warns_bounded( - FutureWarning, - "The '(closed|normalize)' keyword in DatetimeIndex construction is deprecated", - lower="2.0.99", - ): - pd.DatetimeIndex( - data=df["A"], - tz=None, - normalize=False, - closed=None, - ambiguous="NaT", - copy=True, - ) + pd.DatetimeIndex( + data=df["A"], + tz=None, + ambiguous="NaT", + copy=True, + ) def test_read_csv() -> None: @@ -2086,7 +2079,7 @@ def test_set_columns() -> None: df.columns = (1, 2) # type: ignore[assignment] df.columns = (1, "a") # type: ignore[assignment] if TYPE_CHECKING_INVALID_USAGE: - df.columns = "abc" # type: ignore[assignment] # pyright: ignore[reportGeneralTypeIssues] + df.columns = "abc" # type: ignore[assignment] # pyright: ignore[reportAttributeAccessIssue] def test_frame_index_numpy() -> None: @@ -2149,9 +2142,9 @@ def test_func(h: Hashable): pass if TYPE_CHECKING_INVALID_USAGE: - test_func(pd.DataFrame()) # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] - test_func(pd.Series([], dtype=object)) # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] - test_func(pd.Index([])) # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + test_func(pd.DataFrame()) # type: ignore[arg-type] # pyright: ignore[reportArgumentType] + test_func(pd.Series([], dtype=object)) # type: ignore[arg-type] # pyright: ignore[reportArgumentType] + test_func(pd.Index([])) # type: ignore[arg-type] # pyright: ignore[reportArgumentType] def test_columns_mixlist() -> None: @@ -2407,9 +2400,8 @@ def sum_mean(x: pd.DataFrame) -> float: return x.sum().mean() with pytest_warns_bounded( - FutureWarning, + DeprecationWarning, "DataFrameGroupBy.apply operated on the grouping columns.", - lower="2.1.99", ): check( assert_type(df.groupby("col1").apply(sum_mean), pd.Series), @@ -2418,9 +2410,8 @@ def sum_mean(x: pd.DataFrame) -> float: lfunc: 
Callable[[pd.DataFrame], float] = lambda x: x.sum().mean() with pytest_warns_bounded( - FutureWarning, + DeprecationWarning, "DataFrameGroupBy.apply operated on the grouping columns.", - lower="2.1.99", ): check(assert_type(df.groupby("col1").apply(lfunc), pd.Series), pd.Series) @@ -2428,9 +2419,8 @@ def sum_to_list(x: pd.DataFrame) -> list: return x.sum().tolist() with pytest_warns_bounded( - FutureWarning, + DeprecationWarning, "DataFrameGroupBy.apply operated on the grouping columns.", - lower="2.1.99", ): check(assert_type(df.groupby("col1").apply(sum_to_list), pd.Series), pd.Series) @@ -2438,9 +2428,8 @@ def sum_to_series(x: pd.DataFrame) -> pd.Series: return x.sum() with pytest_warns_bounded( - FutureWarning, + DeprecationWarning, "DataFrameGroupBy.apply operated on the grouping columns.", - lower="2.1.99", ): check( assert_type(df.groupby("col1").apply(sum_to_series), pd.DataFrame), @@ -2451,9 +2440,8 @@ def sample_to_df(x: pd.DataFrame) -> pd.DataFrame: return x.sample() with pytest_warns_bounded( - FutureWarning, + DeprecationWarning, "DataFrameGroupBy.apply operated on the grouping columns.", - lower="2.1.99", ): check( assert_type( @@ -2570,7 +2558,7 @@ def test_to_dict() -> None: def test(mapping: Mapping) -> None: # pyright: ignore[reportUnusedFunction] DF.to_dict( # type: ignore[call-overload] - into=mapping # pyright: ignore[reportGeneralTypeIssues] + into=mapping # pyright: ignore[reportArgumentType,reportCallIssue] ) @@ -3001,10 +2989,10 @@ def test_to_dict_index() -> None: assert_type(df.to_dict(orient="split", index=False), dict[Hashable, Any]), dict ) if TYPE_CHECKING_INVALID_USAGE: - check(assert_type(df.to_dict(orient="records", index=False), list[dict[Hashable, Any]]), list) # type: ignore[assert-type, call-overload] # pyright: ignore[reportGeneralTypeIssues] - check(assert_type(df.to_dict(orient="dict", index=False), dict[Hashable, Any]), dict) # type: ignore[assert-type, call-overload] # pyright: ignore[reportGeneralTypeIssues] - check(assert_type(df.to_dict(orient="series", index=False), dict[Hashable, Any]), dict) # type: ignore[assert-type, call-overload] # pyright: ignore[reportGeneralTypeIssues] - check(assert_type(df.to_dict(orient="index", index=False), dict[Hashable, Any]), dict) # type: ignore[assert-type, call-overload] # pyright: ignore[reportGeneralTypeIssues] + check(assert_type(df.to_dict(orient="records", index=False), list[dict[Hashable, Any]]), list) # type: ignore[assert-type, call-overload] # pyright: ignore[reportArgumentType,reportAssertTypeFailure,reportCallIssue] + check(assert_type(df.to_dict(orient="dict", index=False), dict[Hashable, Any]), dict) # type: ignore[assert-type, call-overload] # pyright: ignore[reportArgumentType,reportAssertTypeFailure,reportCallIssue] + check(assert_type(df.to_dict(orient="series", index=False), dict[Hashable, Any]), dict) # type: ignore[assert-type, call-overload] # pyright: ignore[reportArgumentType,reportAssertTypeFailure,reportCallIssue] + check(assert_type(df.to_dict(orient="index", index=False), dict[Hashable, Any]), dict) # type: ignore[assert-type, call-overload] # pyright: ignore[reportArgumentType,reportAssertTypeFailure,reportCallIssue] def test_suffix_prefix_index() -> None: @@ -3044,7 +3032,7 @@ def test_to_json_mode() -> None: check(assert_type(result2, str), str) check(assert_type(result4, str), str) if TYPE_CHECKING_INVALID_USAGE: - result3 = df.to_json(orient="records", lines=False, mode="a") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] + result3 = 
df.to_json(orient="records", lines=False, mode="a") # type: ignore[call-overload] # pyright: ignore[reportArgumentType,reportCallIssue] def test_interpolate_inplace() -> None: @@ -3058,18 +3046,6 @@ def test_interpolate_inplace() -> None: check(assert_type(df.interpolate(method="linear", inplace=True), None), type(None)) -def test_groupby_fillna_inplace() -> None: - # GH 691 - groupby = pd.DataFrame({"a": range(3), "b": range(3)}).groupby("a") - with pytest_warns_bounded( - FutureWarning, "DataFrameGroupBy.fillna is deprecated", lower="2.1.99" - ): - check(assert_type(groupby.fillna(0), pd.DataFrame), pd.DataFrame) - check(assert_type(groupby.fillna(0, inplace=False), pd.DataFrame), pd.DataFrame) - if TYPE_CHECKING_INVALID_USAGE: - groupby.fillna(0, inplace=True) # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] - - def test_getitem_generator() -> None: # GH 685 check(assert_type(DF[(f"col{i+1}" for i in range(2))], pd.DataFrame), pd.DataFrame) diff --git a/tests/test_groupby.py b/tests/test_groupby.py index 5b5560162..88e01ed06 100644 --- a/tests/test_groupby.py +++ b/tests/test_groupby.py @@ -79,38 +79,42 @@ def test_frame_groupby_resample() -> None: check(assert_type(GB_DF.resample(M).ax, Index), DatetimeIndex) # agg funcs - check(assert_type(GB_DF.resample(M).sum(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).prod(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).min(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).max(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).first(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).last(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).mean(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).sum(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).median(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).ohlc(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).nunique(), DataFrame), DataFrame) + with pytest_warns_bounded( + DeprecationWarning, + "DataFrameGroupBy.(apply|resample) operated on the grouping columns", + ): + check(assert_type(GB_DF.resample(M).sum(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).prod(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).min(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).max(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).first(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).last(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).mean(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).sum(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).median(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).ohlc(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).nunique(), DataFrame), DataFrame) - # quantile - check(assert_type(GB_DF.resample(M).quantile(0.5), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).quantile([0.5, 0.7]), DataFrame), DataFrame) - check( - assert_type(GB_DF.resample(M).quantile(np.array([0.5, 0.7])), DataFrame), - DataFrame, - ) + # quantile + check(assert_type(GB_DF.resample(M).quantile(0.5), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).quantile([0.5, 0.7]), DataFrame), DataFrame) + check( + assert_type(GB_DF.resample(M).quantile(np.array([0.5, 0.7])), DataFrame), + DataFrame, + ) - # std / var - check(assert_type(GB_DF.resample(M).std(), DataFrame), DataFrame) - 
check(assert_type(GB_DF.resample(M).var(2), DataFrame), DataFrame) + # std / var + check(assert_type(GB_DF.resample(M).std(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).var(2), DataFrame), DataFrame) - # size / count - check(assert_type(GB_DF.resample(M).size(), "Series[int]"), Series, np.integer) - check(assert_type(GB_DF.resample(M).count(), DataFrame), DataFrame) + # size / count + check(assert_type(GB_DF.resample(M).size(), "Series[int]"), Series, np.integer) + check(assert_type(GB_DF.resample(M).count(), DataFrame), DataFrame) - # filling - check(assert_type(GB_DF.resample(M).ffill(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).nearest(), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).bfill(), DataFrame), DataFrame) + # filling + check(assert_type(GB_DF.resample(M).ffill(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).nearest(), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).bfill(), DataFrame), DataFrame) # fillna (deprecated) if TYPE_CHECKING_INVALID_USAGE: @@ -118,42 +122,52 @@ def test_frame_groupby_resample() -> None: # aggregate / apply with pytest_warns_bounded( - FutureWarning, - r"The provided callable is currently using ", - lower="2.0.99", + DeprecationWarning, + "DataFrameGroupBy.(apply|resample) operated on the grouping columns", ): - check(assert_type(GB_DF.resample(M).aggregate(np.sum), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).agg(np.sum), DataFrame), DataFrame) - check(assert_type(GB_DF.resample(M).apply(np.sum), DataFrame), DataFrame) - check( - assert_type(GB_DF.resample(M).aggregate([np.sum, np.mean]), DataFrame), - DataFrame, - ) - check( - assert_type(GB_DF.resample(M).aggregate(["sum", np.mean]), DataFrame), - DataFrame, - ) - check( - assert_type( - GB_DF.resample(M).aggregate({"col1": "sum", "col2": np.mean}), + with pytest_warns_bounded( + FutureWarning, + r"The provided callable is currently using ", + lower="2.0.99", + ): + check( + assert_type(GB_DF.resample(M).aggregate(np.sum), DataFrame), DataFrame + ) + check(assert_type(GB_DF.resample(M).agg(np.sum), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).apply(np.sum), DataFrame), DataFrame) + check( + assert_type(GB_DF.resample(M).aggregate([np.sum, np.mean]), DataFrame), DataFrame, - ), - DataFrame, - ) - check( - assert_type( - GB_DF.resample(M).aggregate( - {"col1": ["sum", np.mean], "col2": np.mean} + ) + check( + assert_type(GB_DF.resample(M).aggregate(["sum", np.mean]), DataFrame), + DataFrame, + ) + check( + assert_type( + GB_DF.resample(M).aggregate({"col1": "sum", "col2": np.mean}), + DataFrame, ), DataFrame, - ), - DataFrame, - ) + ) + check( + assert_type( + GB_DF.resample(M).aggregate( + {"col1": ["sum", np.mean], "col2": np.mean} + ), + DataFrame, + ), + DataFrame, + ) def f(val: DataFrame) -> Series: return val.mean() - check(assert_type(GB_DF.resample(M).aggregate(f), DataFrame), DataFrame) + with pytest_warns_bounded( + DeprecationWarning, + "DataFrameGroupBy.(apply|resample) operated on the grouping columns", + ): + check(assert_type(GB_DF.resample(M).aggregate(f), DataFrame), DataFrame) # aggregate combinations def df2frame(val: DataFrame) -> DataFrame: @@ -166,88 +180,96 @@ def df2scalar(val: DataFrame) -> float: return float(val.mean().mean()) with pytest_warns_bounded( - FutureWarning, - r"The provided callable is currently using ", - lower="2.0.99", + DeprecationWarning, + "DataFrameGroupBy.(apply|resample) operated on the grouping columns", ): - 
check(GB_DF.resample(M).aggregate(np.sum), DataFrame) - check(GB_DF.resample(M).aggregate([np.mean]), DataFrame) - check(GB_DF.resample(M).aggregate(["sum", np.mean]), DataFrame) - check(GB_DF.resample(M).aggregate({"col1": np.sum}), DataFrame) + with pytest_warns_bounded( + FutureWarning, + r"The provided callable is currently using ", + lower="2.0.99", + ): + check(GB_DF.resample(M).aggregate(np.sum), DataFrame) + check(GB_DF.resample(M).aggregate([np.mean]), DataFrame) + check(GB_DF.resample(M).aggregate(["sum", np.mean]), DataFrame) + check(GB_DF.resample(M).aggregate({"col1": np.sum}), DataFrame) + check( + GB_DF.resample(M).aggregate({"col1": np.sum, "col2": np.mean}), + DataFrame, + ) + check( + GB_DF.resample(M).aggregate( + {"col1": [np.sum], "col2": ["sum", np.mean]} + ), + DataFrame, + ) + check( + GB_DF.resample(M).aggregate({"col1": np.sum, "col2": ["sum", np.mean]}), + DataFrame, + ) + check( + GB_DF.resample(M).aggregate({"col1": "sum", "col2": [np.mean]}), + DataFrame, + ) + check(GB_DF.resample(M).aggregate("sum"), DataFrame) + check(GB_DF.resample(M).aggregate(df2frame), DataFrame) + check(GB_DF.resample(M).aggregate(df2series), DataFrame) + check(GB_DF.resample(M).aggregate(df2scalar), DataFrame) + + # asfreq + check(assert_type(GB_DF.resample(M).asfreq(-1.0), DataFrame), DataFrame) + + # getattr check( - GB_DF.resample(M).aggregate({"col1": np.sum, "col2": np.mean}), - DataFrame, + assert_type(GB_DF.resample(M).col1, "_ResamplerGroupBy[DataFrame]"), + DatetimeIndexResamplerGroupby, ) + + # getitem check( - GB_DF.resample(M).aggregate({"col1": [np.sum], "col2": ["sum", np.mean]}), - DataFrame, + assert_type(GB_DF.resample(M)["col1"], "_ResamplerGroupBy[DataFrame]"), + DatetimeIndexResamplerGroupby, ) check( - GB_DF.resample(M).aggregate({"col1": np.sum, "col2": ["sum", np.mean]}), - DataFrame, + assert_type( + GB_DF.resample(M)[["col1", "col2"]], "_ResamplerGroupBy[DataFrame]" + ), + DatetimeIndexResamplerGroupby, ) + + # interpolate + check(assert_type(GB_DF.resample(M).interpolate(), DataFrame), DataFrame) check( - GB_DF.resample(M).aggregate({"col1": "sum", "col2": [np.mean]}), + assert_type(GB_DF.resample(M).interpolate(method="linear"), DataFrame), DataFrame, ) - check(GB_DF.resample(M).aggregate("sum"), DataFrame) - check(GB_DF.resample(M).aggregate(df2frame), DataFrame) - check(GB_DF.resample(M).aggregate(df2series), DataFrame) - check(GB_DF.resample(M).aggregate(df2scalar), DataFrame) - - # asfreq - check(assert_type(GB_DF.resample(M).asfreq(-1.0), DataFrame), DataFrame) - - # getattr - check( - assert_type(GB_DF.resample(M).col1, "_ResamplerGroupBy[DataFrame]"), - DatetimeIndexResamplerGroupby, - ) - - # getitem - check( - assert_type(GB_DF.resample(M)["col1"], "_ResamplerGroupBy[DataFrame]"), - DatetimeIndexResamplerGroupby, - ) - check( - assert_type( - GB_DF.resample(M)[["col1", "col2"]], "_ResamplerGroupBy[DataFrame]" - ), - DatetimeIndexResamplerGroupby, - ) - - # interpolate - check(assert_type(GB_DF.resample(M).interpolate(), DataFrame), DataFrame) - check( - assert_type(GB_DF.resample(M).interpolate(method="linear"), DataFrame), - DataFrame, - ) - check(assert_type(GB_DF.resample(M).interpolate(inplace=True), None), type(None)) + check( + assert_type(GB_DF.resample(M).interpolate(inplace=True), None), type(None) + ) - # pipe - def g(val: Resampler[DataFrame]) -> DataFrame: - assert isinstance(val, Resampler) - return val.mean() + # pipe + def g(val: Resampler[DataFrame]) -> DataFrame: + assert isinstance(val, Resampler) + return val.mean() - 
check(assert_type(GB_DF.resample(M).pipe(g), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).pipe(g), DataFrame), DataFrame) - def h(val: Resampler[DataFrame]) -> Series: - assert isinstance(val, Resampler) - return val.mean().mean() + def h(val: Resampler[DataFrame]) -> Series: + assert isinstance(val, Resampler) + return val.mean().mean() - check(assert_type(GB_DF.resample(M).pipe(h), Series), Series) + check(assert_type(GB_DF.resample(M).pipe(h), Series), Series) - def i(val: Resampler[DataFrame]) -> float: - assert isinstance(val, Resampler) - return float(val.mean().mean().mean()) + def i(val: Resampler[DataFrame]) -> float: + assert isinstance(val, Resampler) + return float(val.mean().mean().mean()) - check(assert_type(GB_DF.resample(M).pipe(i), float), float) + check(assert_type(GB_DF.resample(M).pipe(i), float), float) - # transform - def j(val: Series) -> Series: - return -1 * val + # transform + def j(val: Series) -> Series: + return -1 * val - check(assert_type(GB_DF.resample(M).transform(j), DataFrame), DataFrame) + check(assert_type(GB_DF.resample(M).transform(j), DataFrame), DataFrame) def test_series_groupby_resample() -> None: diff --git a/tests/test_indexes.py b/tests/test_indexes.py index 24d21a091..e1d7b3b92 100644 --- a/tests/test_indexes.py +++ b/tests/test_indexes.py @@ -740,39 +740,48 @@ def test_index_operators() -> None: if TYPE_CHECKING_INVALID_USAGE: assert_type( - i1 & i2, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + i1 + & i2, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( # type: ignore[assert-type] - i1 & 10, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + i1 + & 10, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( # type: ignore[assert-type] - 10 & i1, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + 10 + & i1, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( - i1 | i2, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + i1 + | i2, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( # type: ignore[assert-type] - i1 | 10, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + i1 + | 10, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( # type: ignore[assert-type] - 10 | i1, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + 10 + | i1, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( - i1 ^ i2, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + i1 + ^ i2, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( # type: ignore[assert-type] - i1 ^ 10, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + i1 + ^ 10, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) assert_type( # type: ignore[assert-type] - 10 ^ i1, # type:ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + 10 + ^ i1, # type:ignore[operator] # pyright: ignore[reportAssertTypeFailure,reportOperatorIssue] Never, ) @@ -1044,10 +1053,10 @@ def test_timedelta_div() -> None: check(assert_type([delta] // index, "pd.Index[int]"), pd.Index, np.signedinteger) if TYPE_CHECKING_INVALID_USAGE: - 
1 / index # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - [1] / index # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - 1 // index # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - [1] // index # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + 1 / index # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + [1] / index # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + 1 // index # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + [1] // index # type: ignore[operator] # pyright: ignore[reportOperatorIssue] def test_datetime_operators_builtin() -> None: diff --git a/tests/test_interval.py b/tests/test_interval.py index 7bdc8f4b1..a7cff54c7 100644 --- a/tests/test_interval.py +++ b/tests/test_interval.py @@ -52,10 +52,10 @@ def test_interval_length() -> None: check(assert_type(idres, "pd.Interval[pd.Timestamp]"), pd.Interval, pd.Timestamp) if TYPE_CHECKING_INVALID_USAGE: - 20 in i1 # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - i1 + pd.Timestamp("2000-03-03") # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - i1 * 3 # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - i1 * pd.Timedelta(seconds=20) # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + 20 in i1 # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + i1 + pd.Timestamp("2000-03-03") # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + i1 * 3 # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + i1 * pd.Timedelta(seconds=20) # type: ignore[operator] # pyright: ignore[reportOperatorIssue] i2 = pd.Interval(10, 20) check(assert_type(i2.length, int), int) @@ -71,8 +71,8 @@ def test_interval_length() -> None: check(assert_type(i2 * 4.2, "pd.Interval[float]"), pd.Interval, float) if TYPE_CHECKING_INVALID_USAGE: - pd.Timestamp("2001-01-02") in i2 # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - i2 + pd.Timedelta(seconds=20) # type: ignore[type-var] # pyright: ignore[reportGeneralTypeIssues] + pd.Timestamp("2001-01-02") in i2 # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + i2 + pd.Timedelta(seconds=20) # type: ignore[type-var] # pyright: ignore[reportOperatorIssue] i3 = pd.Interval(13.2, 19.5) check(assert_type(i3.length, float), float) check(assert_type(i3.left, float), float) @@ -84,8 +84,8 @@ def test_interval_length() -> None: check(assert_type(i3 + 3, "pd.Interval[float]"), pd.Interval, float) check(assert_type(i3 * 3, "pd.Interval[float]"), pd.Interval, float) if TYPE_CHECKING_INVALID_USAGE: - pd.Timestamp("2001-01-02") in i3 # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] - i3 + pd.Timedelta(seconds=20) # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues] + pd.Timestamp("2001-01-02") in i3 # type: ignore[operator] # pyright: ignore[reportOperatorIssue] + i3 + pd.Timedelta(seconds=20) # type: ignore[operator] # pyright: ignore[reportOperatorIssue] def test_interval_array_contains(): diff --git a/tests/test_io.py b/tests/test_io.py index 71c069e29..e4f309a11 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -14,7 +14,6 @@ ) import numpy as np -import numpy.typing as npt import pandas as pd from pandas import ( DataFrame, @@ -75,12 +74,7 @@ def test_orc(): with ensure_clean() as path: check(assert_type(DF.to_orc(path), None), type(None)) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be 
removed", - lower="2.1.99", - ): - check(assert_type(read_orc(path), DataFrame), DataFrame) + check(assert_type(read_orc(path), DataFrame), DataFrame) @pytest.mark.skipif(WINDOWS, reason="ORC not available on windows") @@ -88,12 +82,7 @@ def test_orc_path(): with ensure_clean() as path: pathlib_path = Path(path) check(assert_type(DF.to_orc(pathlib_path), None), type(None)) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check(assert_type(read_orc(pathlib_path), DataFrame), DataFrame) + check(assert_type(read_orc(pathlib_path), DataFrame), DataFrame) @pytest.mark.skipif(WINDOWS, reason="ORC not available on windows") @@ -103,24 +92,14 @@ def test_orc_buffer(): check(assert_type(DF.to_orc(file_w), None), type(None)) with open(path, "rb") as file_r: - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check(assert_type(read_orc(file_r), DataFrame), DataFrame) + check(assert_type(read_orc(file_r), DataFrame), DataFrame) @pytest.mark.skipif(WINDOWS, reason="ORC not available on windows") def test_orc_columns(): with ensure_clean() as path: check(assert_type(DF.to_orc(path, index=False), None), type(None)) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check(assert_type(read_orc(path, columns=["a"]), DataFrame), DataFrame) + check(assert_type(read_orc(path, columns=["a"]), DataFrame), DataFrame) @pytest.mark.skipif(WINDOWS, reason="ORC not available on windows") @@ -298,8 +277,8 @@ def test_clipboard(): DataFrame, ) if TYPE_CHECKING_INVALID_USAGE: - pd.read_clipboard(names="abcd") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] - pd.read_clipboard(usecols="abcd") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] + pd.read_clipboard(names="abcd") # type: ignore[call-overload] # pyright: ignore[reportArgumentType] + pd.read_clipboard(usecols="abcd") # type: ignore[call-overload] # pyright: ignore[reportArgumentType] def test_clipboard_iterator(): @@ -371,12 +350,7 @@ def test_sas_xport() -> None: def test_hdf(): with ensure_clean() as path: - with pytest_warns_bounded( - FutureWarning, - r".*all arguments of to_hdf except for the argument 'path_or_buf' will be keyword-only", - lower="2.1.99", - ): - check(assert_type(DF.to_hdf(path, "df"), None), type(None)) + check(assert_type(DF.to_hdf(path, key="df"), None), type(None)) check(assert_type(read_hdf(path), Union[DataFrame, Series]), DataFrame) @@ -425,12 +399,7 @@ def test_hdfstore() -> None: def test_read_hdf_iterator() -> None: with ensure_clean() as path: - with pytest_warns_bounded( - FutureWarning, - r".*all arguments of to_hdf except for the argument 'path_or_buf' will be keyword-only", - lower="2.1.99", - ): - check(assert_type(DF.to_hdf(path, "df", format="table"), None), type(None)) + check(assert_type(DF.to_hdf(path, key="df", format="table"), None), type(None)) ti = read_hdf(path, chunksize=1) check(assert_type(ti, TableIterator), TableIterator) ti.close() @@ -444,12 +413,7 @@ def test_read_hdf_iterator() -> None: def test_hdf_context_manager() -> None: with ensure_clean() as path: - with pytest_warns_bounded( - FutureWarning, - r".*all arguments of to_hdf except for the argument 'path_or_buf' will be keyword-only", - lower="2.1.99", - ): - check(assert_type(DF.to_hdf(path, "df", format="table"), None), type(None)) + check(assert_type(DF.to_hdf(path, key="df", 
format="table"), None), type(None)) with HDFStore(path, mode="r") as store: check(assert_type(store.is_open, bool), bool) check(assert_type(store.get("df"), Union[DataFrame, Series]), DataFrame) @@ -458,12 +422,7 @@ def test_hdf_context_manager() -> None: def test_hdf_series(): s = DF["a"] with ensure_clean() as path: - with pytest_warns_bounded( - FutureWarning, - r".*all arguments of to_hdf except for the argument 'path_or_buf' will be keyword-only", - lower="2.1.99", - ): - check(assert_type(s.to_hdf(path, "s"), None), type(None)) + check(assert_type(s.to_hdf(path, key="s"), None), type(None)) check(assert_type(read_hdf(path, "s"), Union[DataFrame, Series]), Series) @@ -498,45 +457,49 @@ def test_json_series(): check(assert_type(s.to_json(path), None), type(None)) check(assert_type(read_json(path, typ="series"), Series), Series) check(assert_type(DF.to_json(), str), str) - with pytest_warns_bounded( - FutureWarning, - "Passing literal json to 'read_json' is deprecated ", - lower="2.0.99", - ): - check( - assert_type( - read_json(s.to_json(orient=None), typ="series", orient=None), Series - ), + check( + assert_type( + read_json(io.StringIO(s.to_json(orient=None)), typ="series", orient=None), Series, - ) - check( - assert_type( - read_json(s.to_json(orient="split"), typ="series", orient="split"), - Series, + ), + Series, + ) + check( + assert_type( + read_json( + io.StringIO(s.to_json(orient="split")), typ="series", orient="split" ), Series, - ) - check( - assert_type( - read_json(s.to_json(orient="records"), typ="series", orient="records"), - Series, + ), + Series, + ) + check( + assert_type( + read_json( + io.StringIO(s.to_json(orient="records")), typ="series", orient="records" ), Series, - ) - check( - assert_type( - read_json(s.to_json(orient="index"), typ="series", orient="index"), - Series, + ), + Series, + ) + check( + assert_type( + read_json( + io.StringIO(s.to_json(orient="index")), typ="series", orient="index" ), Series, - ) - check( - assert_type( - read_json(s.to_json(orient="table"), typ="series", orient="table"), - Series, + ), + Series, + ) + check( + assert_type( + read_json( + io.StringIO(s.to_json(orient="table")), typ="series", orient="table" ), Series, - ) + ), + Series, + ) def test_json_chunk(): @@ -553,12 +516,7 @@ def test_parquet(): with ensure_clean() as path: check(assert_type(DF.to_parquet(path), None), type(None)) check(assert_type(DF.to_parquet(), bytes), bytes) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check(assert_type(read_parquet(path), DataFrame), DataFrame) + check(assert_type(read_parquet(path), DataFrame), DataFrame) def test_parquet_options(): @@ -567,33 +525,18 @@ def test_parquet_options(): assert_type(DF.to_parquet(path, compression=None, index=True), None), type(None), ) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check(assert_type(read_parquet(path), DataFrame), DataFrame) + check(assert_type(read_parquet(path), DataFrame), DataFrame) def test_feather(): with ensure_clean() as path: check(assert_type(DF.to_feather(path), None), type(None)) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check(assert_type(read_feather(path), DataFrame), DataFrame) - check(assert_type(read_feather(path, columns=["a"]), DataFrame), DataFrame) + check(assert_type(read_feather(path), DataFrame), DataFrame) + 
check(assert_type(read_feather(path, columns=["a"]), DataFrame), DataFrame) with io.BytesIO() as bio: check(assert_type(DF.to_feather(bio), None), type(None)) bio.seek(0) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check(assert_type(read_feather(bio), DataFrame), DataFrame) + check(assert_type(read_feather(bio), DataFrame), DataFrame) def test_read_csv(): @@ -691,8 +634,8 @@ def test_types_read_csv() -> None: ) if TYPE_CHECKING_INVALID_USAGE: - pd.read_csv(path, names="abcd") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] - pd.read_csv(path, usecols="abcd") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] + pd.read_csv(path, names="abcd") # type: ignore[call-overload] # pyright: ignore[reportArgumentType] + pd.read_csv(path, usecols="abcd") # type: ignore[call-overload] # pyright: ignore[reportArgumentType] tfr1: TextFileReader = pd.read_csv(path, nrows=2, iterator=True, chunksize=3) tfr1.close() @@ -825,8 +768,8 @@ def test_read_table(): DataFrame, ) if TYPE_CHECKING_INVALID_USAGE: - pd.read_table(path, names="abcd") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] - pd.read_table(path, usecols="abcd") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] + pd.read_table(path, names="abcd") # type: ignore[call-overload] # pyright: ignore[reportArgumentType] + pd.read_table(path, usecols="abcd") # type: ignore[call-overload] # pyright: ignore[reportArgumentType] def test_read_table_iterator(): @@ -1013,7 +956,7 @@ def test_read_excel() -> None: pd.DataFrame, ) if TYPE_CHECKING_INVALID_USAGE: - pd.read_excel(path, names="abcd") # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues] + pd.read_excel(path, names="abcd") # type: ignore[call-overload] # pyright: ignore[reportArgumentType] def test_read_excel_io_types() -> None: @@ -1435,42 +1378,15 @@ def test_all_read_without_lxml_dtype_backend() -> None: if not WINDOWS: check(assert_type(DF.to_orc(path), None), type(None)) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): - check( - assert_type( - read_orc(path, dtype_backend="numpy_nullable"), DataFrame - ), - DataFrame, - ) - check(assert_type(DF.to_feather(path), None), type(None)) - with pytest_warns_bounded( - DeprecationWarning, - "make_block is deprecated and will be removed", - lower="2.1.99", - ): check( - assert_type(read_feather(path, dtype_backend="pyarrow"), DataFrame), + assert_type(read_orc(path, dtype_backend="numpy_nullable"), DataFrame), DataFrame, ) - - with pytest_warns_bounded( - FutureWarning, "errors='ignore' is deprecated", lower="2.1.99" - ): - check( - assert_type( - pd.to_numeric( - [1.0, 2.0, "blerg"], - errors="ignore", - dtype_backend="numpy_nullable", - ), - npt.NDArray, - ), - np.ndarray, - ) + check(assert_type(DF.to_feather(path), None), type(None)) + check( + assert_type(read_feather(path, dtype_backend="pyarrow"), DataFrame), + DataFrame, + ) with ensure_clean(".xlsx") as path: as_str: str = path diff --git a/tests/test_pandas.py b/tests/test_pandas.py index c94257461..c3760aa0c 100644 --- a/tests/test_pandas.py +++ b/tests/test_pandas.py @@ -563,10 +563,6 @@ def test_to_numeric_scalar() -> None: check(assert_type(pd.to_numeric(1), float), int) check(assert_type(pd.to_numeric("1.2"), float), float) check(assert_type(pd.to_numeric("blerg", errors="coerce"), float), float) - with 
pytest_warns_bounded( - FutureWarning, "errors='ignore' is deprecated", lower="2.1.99" - ): - check(assert_type(pd.to_numeric("blerg", errors="ignore"), Scalar), str) check(assert_type(pd.to_numeric(1, downcast="signed"), float), int) check(assert_type(pd.to_numeric(1, downcast="unsigned"), float), int) check(assert_type(pd.to_numeric(1, downcast="float"), float), int) @@ -1272,29 +1268,29 @@ def test_merge_ordered() -> None: pd.DataFrame, ) if TYPE_CHECKING_INVALID_USAGE: - pd.merge_ordered( # type: ignore[call-overload] + pd.merge_ordered( # type: ignore[call-overload] # pyright: ignore[reportCallIssue] ls, rs, left_on="left", right_on="right", - left_by="left", # pyright: ignore[reportGeneralTypeIssues] - right_by="right", # pyright: ignore[reportGeneralTypeIssues] + left_by="left", # pyright: ignore[reportArgumentType] + right_by="right", # pyright: ignore[reportArgumentType] ) - pd.merge_ordered( # type: ignore[call-overload] + pd.merge_ordered( # type: ignore[call-overload] # pyright: ignore[reportCallIssue] ls, rf, left_on="left", right_on="b", - left_by="left", # pyright: ignore[reportGeneralTypeIssues] - right_by="b", # pyright: ignore[reportGeneralTypeIssues] + left_by="left", # pyright: ignore[reportArgumentType] + right_by="b", # pyright: ignore[reportArgumentType] ) - pd.merge_ordered( # type: ignore[call-overload] + pd.merge_ordered( # type: ignore[call-overload] # pyright: ignore[reportCallIssue] lf, rs, left_on="a", right_on="right", - left_by="a", # pyright: ignore[reportGeneralTypeIssues] - right_by="right", # pyright: ignore[reportGeneralTypeIssues] + left_by="a", # pyright: ignore[reportArgumentType] + right_by="right", # pyright: ignore[reportArgumentType] ) @@ -1988,7 +1984,7 @@ def g(x: pd.Series) -> int: ), pd.DataFrame, ) - with pytest_warns_bounded(FutureWarning, "'M' is deprecated", lower="2.1.99"): + with pytest_warns_bounded(FutureWarning, "'(M|A)' is deprecated", lower="2.1.99"): check( assert_type( pd.pivot_table( diff --git a/tests/test_resampler.py b/tests/test_resampler.py index 9c7fe0018..75742725c 100644 --- a/tests/test_resampler.py +++ b/tests/test_resampler.py @@ -210,7 +210,7 @@ def j( if TYPE_CHECKING_INVALID_USAGE: DF.resample(MonthFreq).pipe( j, - "a", # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + "a", # type: ignore[arg-type] # pyright: ignore[reportArgumentType,reportCallIssue] [1.0, 2.0], arg2="hi", kw=(1,), @@ -218,7 +218,7 @@ def j( DF.resample(MonthFreq).pipe( j, 1, - [1.0, "b"], # type: ignore[list-item] # pyright: ignore[reportGeneralTypeIssues] + [1.0, "b"], # type: ignore[list-item] # pyright: ignore[reportArgumentType,reportCallIssue] arg2="hi", kw=(1,), ) @@ -226,7 +226,7 @@ def j( j, 1, [1.0], - arg2=11, # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + arg2=11, # type: ignore[arg-type] # pyright: ignore[reportArgumentType,reportCallIssue] kw=(1,), ) DF.resample(MonthFreq).pipe( @@ -234,13 +234,13 @@ def j( 1, [1.0], arg2="hi", - kw=(1, 2), # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues] + kw=(1, 2), # type: ignore[arg-type] # pyright: ignore[reportArgumentType,reportCallIssue] ) DF.resample(MonthFreq).pipe( # type: ignore[call-arg] j, 1, [1.0], - arg3="hi", # pyright: ignore[reportGeneralTypeIssues] + arg3="hi", # pyright: ignore[reportCallIssue] kw=(1,), ) DF.resample(MonthFreq).pipe( # type: ignore[misc] @@ -248,11 +248,11 @@ def j( 1, [1.0], 11, # type: ignore[arg-type] - (1,), # pyright: ignore[reportGeneralTypeIssues] + (1,), # pyright: ignore[reportCallIssue] ) 
         DF.resample(MonthFreq).pipe(  # type: ignore[call-arg]
             j,
-            pos=1,  # pyright: ignore[reportGeneralTypeIssues]
+            pos=1,  # pyright: ignore[reportCallIssue]
             arg1=[1.0],
             arg2=11,  # type: ignore[arg-type]
             kw=(1,),
@@ -265,8 +265,8 @@ def k(x: int, t: "DatetimeIndexResampler[DataFrame]") -> DataFrame:
     check(assert_type(DF.resample(MonthFreq).pipe((k, "t"), 1), DataFrame), DataFrame)

     if TYPE_CHECKING_INVALID_USAGE:
-        DF.resample(MonthFreq).pipe(
-            (k, 1),  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
+        DF.resample(MonthFreq).pipe(  # pyright: ignore[reportCallIssue]
+            (k, 1),  # type: ignore[arg-type] # pyright: ignore[reportArgumentType]
             1,
         )
diff --git a/tests/test_scalars.py b/tests/test_scalars.py
index 21e70ff6c..478aab2ee 100644
--- a/tests/test_scalars.py
+++ b/tests/test_scalars.py
@@ -590,7 +590,7 @@ def test_timedelta_add_sub() -> None:
     # https://github.com/microsoft/pyright/issues/4088
     check(
         assert_type(
-            as_dt_timedelta + td,  # pyright: ignore[reportGeneralTypeIssues]
+            as_dt_timedelta + td,  # pyright: ignore[reportAssertTypeFailure]
             pd.Timedelta,
         ),
         pd.Timedelta,
@@ -617,14 +617,14 @@ def test_timedelta_add_sub() -> None:
     # TypeError: as_period, as_timestamp, as_datetime, as_date, as_datetime64,
    # as_period_index, as_datetime_index, as_ndarray_dt64
     if TYPE_CHECKING_INVALID_USAGE:
-        td - as_period  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        td - as_timestamp  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        td - as_datetime  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        td - as_date  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        td - as_datetime64  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        td - as_period_index  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        td - as_datetime_index  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        td - as_ndarray_dt64  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
+        td - as_period  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        td - as_timestamp  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        td - as_datetime  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        td - as_date  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        td - as_datetime64  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        td - as_period_index  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        td - as_datetime_index  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        td - as_ndarray_dt64  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]

     check(assert_type(td - td, pd.Timedelta), pd.Timedelta)
     check(assert_type(td - as_dt_timedelta, pd.Timedelta), pd.Timedelta)
@@ -649,7 +649,7 @@ def test_timedelta_add_sub() -> None:
     # https://github.com/microsoft/pyright/issues/4088
     check(
         assert_type(
-            as_dt_timedelta - td,  # pyright: ignore[reportGeneralTypeIssues]
+            as_dt_timedelta - td,  # pyright: ignore[reportAssertTypeFailure]
             pd.Timedelta,
         ),
         pd.Timedelta,
@@ -754,14 +754,14 @@ def test_timedelta_mul_div() -> None:
     # TypeError: md_int, md_float, md_ndarray_intp, md_ndarray_float, mp_series_int,
    # mp_series_float, md_int64_index, md_float_index
     if TYPE_CHECKING_INVALID_USAGE:
-        md_int // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_float // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_ndarray_intp // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_ndarray_float // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        mp_series_int // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_series_float // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_int64_index // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_float_index // td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
+        md_int // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_float // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_ndarray_intp // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_ndarray_float // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        mp_series_int // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_series_float // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_int64_index // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_float_index // td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]

     check(assert_type(td / td, float), float)
     check(assert_type(td / pd.NaT, float), float)
@@ -788,14 +788,14 @@ def test_timedelta_mul_div() -> None:
     # TypeError: md_int, md_float, md_ndarray_intp, md_ndarray_float, mp_series_int,
    # mp_series_float, md_int64_index, md_float_index
     if TYPE_CHECKING_INVALID_USAGE:
-        md_int / td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_float / td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_ndarray_intp / td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_ndarray_float / td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        mp_series_int / td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_series_float / td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_int64_index / td,  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        md_float_index / td,  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
+        md_int / td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_float / td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_ndarray_intp / td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_ndarray_float / td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        mp_series_int / td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_series_float / td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_int64_index / td,  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        md_float_index / td,  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]


 def test_timedelta_mod_abs_unary() -> None:
diff --git a/tests/test_series.py b/tests/test_series.py
index 1c7e8ff17..96e644a22 100644
--- a/tests/test_series.py
+++ b/tests/test_series.py
@@ -275,23 +275,18 @@ def test_types_fillna() -> None:
     s = pd.Series([1.0, np.nan, np.nan, 3.0])
     check(assert_type(s.fillna(0), "pd.Series[float]"), pd.Series, float)
     check(assert_type(s.fillna(0, axis="index"), "pd.Series[float]"), pd.Series, float)
-    with pytest_warns_bounded(
-        FutureWarning,
-        "Series.fillna with 'method' is deprecated",
-        lower="2.0.99",
-    ):
-        check(
-            assert_type(s.fillna(method="backfill", axis=0), "pd.Series[float]"),
-            pd.Series,
-            float,
-        )
-        assert assert_type(s.fillna(method="bfill", inplace=True), None) is None
-        check(assert_type(s.fillna(method="pad"), "pd.Series[float]"), pd.Series, float)
-        check(
-            assert_type(s.fillna(method="ffill", limit=1), "pd.Series[float]"),
-            pd.Series,
-            float,
-        )
+    check(
+        assert_type(s.fillna(0, axis=0), "pd.Series[float]"),
+        pd.Series,
+        float,
+    )
+    assert assert_type(s.fillna(0, inplace=True), None) is None
+    check(assert_type(s.fillna(0), "pd.Series[float]"), pd.Series, float)
+    check(
+        assert_type(s.fillna(0, limit=1), "pd.Series[float]"),
+        pd.Series,
+        float,
+    )
     # GH 263
     check(assert_type(s.fillna(pd.NA), "pd.Series[float]"), pd.Series, float)
@@ -322,7 +317,7 @@ def test_types_sort_values() -> None:
     s = pd.Series([4, 2, 1, 3])
     check(assert_type(s.sort_values(), "pd.Series[int]"), pd.Series, np.integer)
     if TYPE_CHECKING_INVALID_USAGE:
-        check(assert_type(s.sort_values(0), pd.Series), pd.Series)  # type: ignore[assert-type,call-overload] # pyright: ignore[reportGeneralTypeIssues]
+        check(assert_type(s.sort_values(0), pd.Series), pd.Series)  # type: ignore[assert-type,call-overload] # pyright: ignore[reportAssertTypeFailure,reportCallIssue]
     check(assert_type(s.sort_values(axis=0), "pd.Series[int]"), pd.Series, np.integer)
     check(
         assert_type(s.sort_values(ascending=False), "pd.Series[int]"),
@@ -858,9 +853,9 @@ def test_types_window() -> None:
     s.expanding()
     s.rolling(2, center=True)
     if TYPE_CHECKING_INVALID_USAGE:
-        s.expanding(axis=0)  # type: ignore[call-arg] # pyright: ignore[reportGeneralTypeIssues]
-        s.rolling(2, axis=0, center=True)  # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues]
-        s.expanding(axis=0, center=True)  # type: ignore[call-arg] # pyright: ignore[reportGeneralTypeIssues]
+        s.expanding(axis=0)  # type: ignore[call-arg] # pyright: ignore[reportCallIssue]
+        s.rolling(2, axis=0, center=True)  # type: ignore[call-overload] # pyright: ignore[reportCallIssue]
+        s.expanding(axis=0, center=True)  # type: ignore[call-arg] # pyright: ignore[reportCallIssue]

     s.rolling(2)

@@ -902,7 +897,7 @@ def test_update() -> None:
     # Series.update() accepting objects that can be coerced to a Series was added in 1.1.0 https://pandas.pydata.org/docs/whatsnew/v1.1.0.html
     s1.update([1, 2, -4, 3])
     if TYPE_CHECKING_INVALID_USAGE:
-        s1.update([1, "b", "c", "d"])  # type: ignore[list-item] # pyright: ignore[reportGeneralTypeIssues]
+        s1.update([1, "b", "c", "d"])  # type: ignore[list-item] # pyright: ignore[reportArgumentType]

     s1.update({1: 9, 3: 4})

@@ -1078,7 +1073,7 @@ def add1(x: int) -> int:
     s6: None = pd.Series([1, 2, 3]).rename("A", inplace=True)

     if TYPE_CHECKING_INVALID_USAGE:
-        s7 = pd.Series([1, 2, 3]).rename({1: [3, 4, 5]})  # type: ignore[dict-item] # pyright: ignore[reportGeneralTypeIssues]
+        s7 = pd.Series([1, 2, 3]).rename({1: [3, 4, 5]})  # type: ignore[dict-item] # pyright: ignore[reportArgumentType,reportCallIssue]


 def test_types_ne() -> None:
@@ -1099,7 +1094,7 @@ def test_types_ewm() -> None:
     if TYPE_CHECKING_INVALID_USAGE:
         check(
             assert_type(
-                s1.ewm(com=0.3, min_periods=0, adjust=False, ignore_na=True, axis=0),  # type: ignore[call-arg] # pyright: ignore[reportGeneralTypeIssues]
+                s1.ewm(com=0.3, min_periods=0, adjust=False, ignore_na=True, axis=0),  # type: ignore[call-arg] # pyright: ignore[reportAssertTypeFailure,reportCallIssue]
                 "ExponentialMovingWindow[pd.Series]",
             ),
             ExponentialMovingWindow,
@@ -2629,7 +2624,7 @@ def test_astype_other() -> None:

     # Test incorrect Literal
     if TYPE_CHECKING_INVALID_USAGE:
-        s.astype("foobar")  # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues]
+        s.astype("foobar")  # type: ignore[call-overload] # pyright: ignore[reportArgumentType,reportCallIssue]

     # Test self-consistent with s.dtype (#747)
     # NOTE: https://github.com/python/typing/issues/801#issuecomment-1646171898
@@ -2729,8 +2724,8 @@ def test_prefix_summix_axis() -> None:
     )

     if TYPE_CHECKING_INVALID_USAGE:
-        s.add_prefix("_item", axis=1)  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
-        s.add_suffix("_item", axis="columns")  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
+        s.add_prefix("_item", axis=1)  # type: ignore[arg-type] # pyright: ignore[reportArgumentType]
+        s.add_suffix("_item", axis="columns")  # type: ignore[arg-type] # pyright: ignore[reportArgumentType]


 def test_convert_dtypes_convert_floating() -> None:
@@ -2768,7 +2763,7 @@ def test_to_json_mode() -> None:
     check(assert_type(result2, str), str)
     check(assert_type(result4, str), str)
     if TYPE_CHECKING_INVALID_USAGE:
-        result3 = s.to_json(orient="records", lines=False, mode="a")  # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues]
+        result3 = s.to_json(orient="records", lines=False, mode="a")  # type: ignore[call-overload] # pyright: ignore[reportArgumentType,reportCallIssue]


 def test_groupby_diff() -> None:
@@ -2831,10 +2826,10 @@ def test_timedelta_div() -> None:
     check(assert_type([delta] // series, "pd.Series[int]"), pd.Series, np.signedinteger)

     if TYPE_CHECKING_INVALID_USAGE:
-        1 / series  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        [1] / series  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        1 // series  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        [1] // series  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
+        1 / series  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        [1] / series  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        1 // series  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        [1] // series  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]


 def test_rank() -> None:
@@ -2969,7 +2964,7 @@ def first_arg_series(
     if TYPE_CHECKING_INVALID_USAGE:
         ser.pipe(
             first_arg_series,
-            "a",  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
+            "a",  # type: ignore[arg-type] # pyright: ignore[reportArgumentType,reportCallIssue]
             [1.0, 2.0],
             argument_2="hi",
             keyword_only=(1, 2),
@@ -2977,7 +2972,7 @@ def first_arg_series(
         ser.pipe(
             first_arg_series,
             1,
-            [1.0, "b"],  # type: ignore[list-item] # pyright: ignore[reportGeneralTypeIssues]
+            [1.0, "b"],  # type: ignore[list-item] # pyright: ignore[reportArgumentType,reportCallIssue]
             argument_2="hi",
             keyword_only=(1, 2),
         )
@@ -2985,7 +2980,7 @@ def first_arg_series(
             first_arg_series,
             1,
             [1.0, 2.0],
-            argument_2=11,  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
+            argument_2=11,  # type: ignore[arg-type] # pyright: ignore[reportArgumentType,reportCallIssue]
             keyword_only=(1, 2),
         )
         ser.pipe(
@@ -2993,13 +2988,13 @@ def first_arg_series(
             1,
             [1.0, 2.0],
             argument_2="hi",
-            keyword_only=(1,),  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
+            keyword_only=(1,),  # type: ignore[arg-type] # pyright: ignore[reportArgumentType,reportCallIssue]
         )
         ser.pipe(  # type: ignore[call-arg]
             first_arg_series,
             1,
             [1.0, 2.0],
-            argument_3="hi",  # pyright: ignore[reportGeneralTypeIssues]
+            argument_3="hi",  # pyright: ignore[reportCallIssue]
             keyword_only=(1, 2),
         )
         ser.pipe(  # type: ignore[misc]
@@ -3007,11 +3002,11 @@ def first_arg_series(
             1,
             [1.0, 2.0],
             11,  # type: ignore[arg-type]
-            (1, 2),  # pyright: ignore[reportGeneralTypeIssues]
+            (1, 2),  # pyright: ignore[reportCallIssue]
         )
         ser.pipe(  # type: ignore[call-arg]
             first_arg_series,
-            positional_only=1,  # pyright: ignore[reportGeneralTypeIssues]
+            positional_only=1,  # pyright: ignore[reportCallIssue]
             argument_1=[1.0, 2.0],
             argument_2=11,  # type: ignore[arg-type]
             keyword_only=(1, 2),
@@ -3035,13 +3030,13 @@ def first_arg_not_series(argument_1: int, ser: pd.Series) -> pd.Series:
         ser.pipe(
             (
                 first_arg_not_series,  # type: ignore[arg-type]
-                1,  # pyright: ignore[reportGeneralTypeIssues]
+                1,  # pyright: ignore[reportArgumentType,reportCallIssue]
             ),
             1,
         )
         ser.pipe(
             (
-                1,  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
+                1,  # type: ignore[arg-type] # pyright: ignore[reportArgumentType,reportCallIssue]
                 "df",
             ),
             1,
diff --git a/tests/test_testing.py b/tests/test_testing.py
index 70defa6f6..00566a8a9 100644
--- a/tests/test_testing.py
+++ b/tests/test_testing.py
@@ -29,7 +29,7 @@ def test_types_assert_series_equal() -> None:
         check_datetimelike_compat=True,
     )
     if TYPE_CHECKING_INVALID_USAGE:
-        assert_series_equal(  # type: ignore[call-overload] # pyright: ignore[reportGeneralTypeIssues]
+        assert_series_equal(  # type: ignore[call-overload] # pyright: ignore[reportCallIssue]
             s1,
             s2,
             check_dtype=True,
diff --git a/tests/test_timefuncs.py b/tests/test_timefuncs.py
index 2bfad7d1b..eeb177b7c 100644
--- a/tests/test_timefuncs.py
+++ b/tests/test_timefuncs.py
@@ -258,9 +258,9 @@ def test_fail_on_adding_two_timestamps() -> None:
     s1 = pd.Series(pd.to_datetime(["2022-05-01", "2022-06-01"]))
     s2 = pd.Series(pd.to_datetime(["2022-05-15", "2022-06-15"]))
     if TYPE_CHECKING_INVALID_USAGE:
-        ssum: pd.Series = s1 + s2  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
+        ssum: pd.Series = s1 + s2  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
         ts = pd.Timestamp("2022-06-30")
-        tsum: pd.Series = s1 + ts  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
+        tsum: pd.Series = s1 + ts  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]


 def test_dtindex_tzinfo() -> None:
@@ -1169,9 +1169,9 @@ def test_timedelta64_and_arithmatic_operator() -> None:
     check(assert_type((s3 + td), "TimedeltaSeries"), pd.Series, pd.Timedelta)
     check(assert_type((s3 / td), "pd.Series[float]"), pd.Series, float)
     if TYPE_CHECKING_INVALID_USAGE:
-        r1 = s1 * td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        r2 = s1 / td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
-        r3 = s3 * td  # type: ignore[operator] # pyright: ignore[reportGeneralTypeIssues]
+        r1 = s1 * td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        r2 = s1 / td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]
+        r3 = s3 * td  # type: ignore[operator] # pyright: ignore[reportOperatorIssue]


 def test_timedeltaseries_add_timestampseries() -> None:
@@ -1203,8 +1203,8 @@ def test_timestamp_strptime_fails():
     if TYPE_CHECKING_INVALID_USAGE:
         assert_never(
             pd.Timestamp.strptime(
-                "2023-02-16",  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
-                "%Y-%M-%D",  # type: ignore[arg-type] # pyright: ignore[reportGeneralTypeIssues]
+                "2023-02-16",  # type: ignore[arg-type] # pyright: ignore[reportArgumentType]
+                "%Y-%M-%D",  # type: ignore[arg-type] # pyright: ignore[reportArgumentType]
             )
         )