CLN: Remove trailing commas #36295

Closed
wants to merge 2 commits into from
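For context, the pattern applied throughout this diff: a trailing comma at the end of a bracketed call or literal is the signal black's magic-trailing-comma behaviour uses to keep the expression exploded across multiple lines, so removing that comma allows the expression to be collapsed onto a single line. Below is a minimal before/after sketch; it is not taken from this PR's files, the variable name is illustrative, and it assumes the collapsed form still fits within the configured line-length limit.

# Before: the trailing comma after "akima" keeps the list exploded, one item per line.
interp_methods = [
    "linear",
    "akima",
]

# After: with the trailing comma removed, the same list can be written on one line.
interp_methods = ["linear", "akima"]
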
84 changes: 14 additions & 70 deletions pandas/tests/series/methods/test_interpolate.py
@@ -26,8 +26,7 @@
"from_derivatives",
"pchip",
"akima",
"cubicspline",
]
"cubicspline"]
)
def nontemporal_method(request):
"""Fixture that returns an (method name, required kwargs) pair.
@@ -56,8 +55,7 @@ def nontemporal_method(request):
"from_derivatives",
"pchip",
"akima",
"cubicspline",
]
"cubicspline"]
)
def interp_methods_ind(request):
"""Fixture that returns a (method name, required kwargs) pair to
@@ -104,10 +102,7 @@ def test_interpolate_cubicspline(self):

ser = Series([10, 11, 12, 13])

expected = Series(
[11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
)
expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]))
# interpolate at new_index
new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
float
@@ -134,21 +129,15 @@ def test_interpolate_akima(self):
ser = Series([10, 11, 12, 13])

# interpolate at new_index where `der` is zero
expected = Series(
[11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
)
expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]))
new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
float
)
interp_s = ser.reindex(new_index).interpolate(method="akima")
tm.assert_series_equal(interp_s[1:3], expected)

# interpolate at new_index where `der` is a non-zero int
expected = Series(
[11.0, 1.0, 1.0, 1.0, 12.0, 1.0, 1.0, 1.0, 13.0],
index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
)
expected = Series([11.0, 1.0, 1.0, 1.0, 12.0, 1.0, 1.0, 1.0, 13.0], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]))
new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
float
)
@@ -159,10 +148,7 @@
def test_interpolate_piecewise_polynomial(self):
ser = Series([10, 11, 12, 13])

expected = Series(
[11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
)
expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]))
# interpolate at new_index
new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
float
@@ -174,10 +160,7 @@
def test_interpolate_from_derivatives(self):
ser = Series([10, 11, 12, 13])

expected = Series(
[11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
)
expected = Series([11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00], index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]))
# interpolate at new_index
new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
float
@@ -187,13 +170,7 @@

@pytest.mark.parametrize(
"kwargs",
[
{},
pytest.param(
{"method": "polynomial", "order": 1}, marks=td.skip_if_no_scipy
),
],
)
[{}, pytest.param({"method": "polynomial", "order": 1}, marks=td.skip_if_no_scipy)])
def test_interpolate_corners(self, kwargs):
s = Series([np.nan, np.nan])
tm.assert_series_equal(s.interpolate(**kwargs), s)
@@ -233,15 +210,7 @@ def test_interpolate_non_ts(self):
with pytest.raises(ValueError, match=msg):
s.interpolate(method="time")

@pytest.mark.parametrize(
"kwargs",
[
{},
pytest.param(
{"method": "polynomial", "order": 1}, marks=td.skip_if_no_scipy
),
],
)
@pytest.mark.parametrize("kwargs",[{}, pytest.param({"method": "polynomial", "order": 1}, marks=td.skip_if_no_scipy)])
def test_nan_interpolate(self, kwargs):
s = Series([0, 1, np.nan, 3])
result = s.interpolate(**kwargs)
@@ -431,17 +400,7 @@ def test_interp_limit_area(self):

@pytest.mark.parametrize(
"method, limit_direction, expected",
[
("pad", "backward", "forward"),
("ffill", "backward", "forward"),
("backfill", "forward", "backward"),
("bfill", "forward", "backward"),
("pad", "both", "forward"),
("ffill", "both", "forward"),
("backfill", "both", "backward"),
("bfill", "both", "backward"),
],
)
[("pad", "backward", "forward"), ("ffill", "backward", "forward"), ("backfill", "forward", "backward"), ("bfill", "forward", "backward"), ("pad", "both", "forward"), ("ffill", "both", "forward"), ("backfill", "both", "backward"), ("bfill", "both", "backward")])
def test_interp_limit_direction_raises(self, method, limit_direction, expected):
# https://github.com/pandas-dev/pandas/pull/34746
s = Series([1, 2, 3])
@@ -540,14 +499,9 @@ def test_interp_nonmono_raise(self):
@td.skip_if_no_scipy
@pytest.mark.parametrize("method", ["nearest", "pad"])
def test_interp_datetime64(self, method, tz_naive_fixture):
df = Series(
[1, np.nan, 3], index=date_range("1/1/2000", periods=3, tz=tz_naive_fixture)
)
df = Series([1, np.nan, 3], index=date_range("1/1/2000", periods=3, tz=tz_naive_fixture))
result = df.interpolate(method=method)
expected = Series(
[1.0, 1.0, 3.0],
index=date_range("1/1/2000", periods=3, tz=tz_naive_fixture),
)
expected = Series([1.0, 1.0, 3.0], index=date_range("1/1/2000", periods=3, tz=tz_naive_fixture))
tm.assert_series_equal(result, expected)

def test_interp_pad_datetime64tz_values(self):
@@ -658,14 +612,7 @@ def test_series_interpolate_intraday(self):

tm.assert_numpy_array_equal(result.values, exp.values)

@pytest.mark.parametrize(
"ind",
[
["a", "b", "c", "d"],
pd.period_range(start="2019-01-01", periods=4),
pd.interval_range(start=0, end=4),
],
)
@pytest.mark.parametrize("ind",[["a", "b", "c", "d"], pd.period_range(start="2019-01-01", periods=4), pd.interval_range(start=0, end=4)])
def test_interp_non_timedelta_index(self, interp_methods_ind, ind):
# gh 21662
df = pd.DataFrame([0, 1, np.nan, 3], index=ind)
Expand Down Expand Up @@ -711,10 +658,7 @@ def test_interpolate_timedelta_index(self, interp_methods_ind):
"This interpolation method is not supported for Timedelta Index yet."
)

@pytest.mark.parametrize(
"ascending, expected_values",
[(True, [1, 2, 3, 9, 10]), (False, [10, 9, 3, 2, 1])],
)
@pytest.mark.parametrize("ascending, expected_values", [(True, [1, 2, 3, 9, 10]), (False, [10, 9, 3, 2, 1])])
def test_interpolate_unsorted_index(self, ascending, expected_values):
# GH 21037
ts = pd.Series(data=[10, 9, np.nan, 2, 1], index=[10, 9, 3, 2, 1])
56 changes: 12 additions & 44 deletions pandas/tests/series/methods/test_unstack.py
@@ -7,34 +7,21 @@


def test_unstack():
index = MultiIndex(
levels=[["bar", "foo"], ["one", "three", "two"]],
codes=[[1, 1, 0, 0], [0, 1, 0, 2]],
)
index = MultiIndex(levels=[["bar", "foo"], ["one", "three", "two"]], codes=[[1, 1, 0, 0], [0, 1, 0, 2]])

s = Series(np.arange(4.0), index=index)
unstacked = s.unstack()

expected = DataFrame(
[[2.0, np.nan, 3.0], [0.0, 1.0, np.nan]],
index=["bar", "foo"],
columns=["one", "three", "two"],
)
expected = DataFrame([[2.0, np.nan, 3.0], [0.0, 1.0, np.nan]], index=["bar", "foo"], columns=["one", "three", "two"])

tm.assert_frame_equal(unstacked, expected)

unstacked = s.unstack(level=0)
tm.assert_frame_equal(unstacked, expected.T)

index = MultiIndex(
levels=[["bar"], ["one", "two", "three"], [0, 1]],
codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]],
)
index = MultiIndex(levels=[["bar"], ["one", "two", "three"], [0, 1]], codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]])
s = Series(np.random.randn(6), index=index)
exp_index = MultiIndex(
levels=[["one", "two", "three"], [0, 1]],
codes=[[0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]],
)
exp_index = MultiIndex(levels=[["one", "two", "three"], [0, 1]], codes=[[0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]])
expected = DataFrame({"bar": s.values}, index=exp_index).sort_index(level=0)
unstacked = s.unstack(0).sort_index()
tm.assert_frame_equal(unstacked, expected)
@@ -48,18 +35,9 @@ def test_unstack():
)
tm.assert_frame_equal(left, right)

idx = pd.MultiIndex.from_arrays(
[
["cat", "cat", "cat", "dog", "dog"],
["a", "a", "b", "a", "b"],
[1, 2, 1, 1, np.nan],
]
)
idx = pd.MultiIndex.from_arrays([["cat", "cat", "cat", "dog", "dog"], ["a", "a", "b", "a", "b"], [1, 2, 1, 1, np.nan]])
ts = pd.Series([1.0, 1.1, 1.2, 1.3, 1.4], index=idx)
right = DataFrame(
[[1.0, 1.3], [1.1, np.nan], [np.nan, 1.4], [1.2, np.nan]],
columns=["cat", "dog"],
)
right = DataFrame([[1.0, 1.3], [1.1, np.nan], [np.nan, 1.4], [1.2, np.nan]], columns=["cat", "dog"])
tpls = [("a", 1), ("a", 2), ("b", np.nan), ("b", 1)]
right.index = pd.MultiIndex.from_tuples(tpls)
tm.assert_frame_equal(ts.unstack(level=0), right)
@@ -73,11 +51,7 @@ def test_unstack_tuplename_in_multiindex():
ser = pd.Series(1, index=idx)
result = ser.unstack(("A", "a"))

expected = pd.DataFrame(
[[1, 1, 1], [1, 1, 1], [1, 1, 1]],
columns=pd.MultiIndex.from_tuples([("a",), ("b",), ("c",)], names=[("A", "a")]),
index=pd.Index([1, 2, 3], name=("B", "b")),
)
expected = pd.DataFrame([[1, 1, 1], [1, 1, 1], [1, 1, 1]], columns=pd.MultiIndex.from_tuples([("a",), ("b",), ("c",)], names=[("A", "a")]), index=pd.Index([1, 2, 3], name=("B", "b")))
tm.assert_frame_equal(result, expected)


@@ -96,12 +70,10 @@ def test_unstack_tuplename_in_multiindex():
(("A", "a"), "B"),
[[1, 1, 1, 1], [1, 1, 1, 1]],
pd.Index([3, 4], name="C"),
pd.MultiIndex.from_tuples(
[("a", 1), ("a", 2), ("b", 1), ("b", 2)], names=[("A", "a"), "B"]
),
),
],
)
pd.MultiIndex.from_tuples([("a", 1), ("a", 2), ("b", 1), ("b", 2)], names=[("A", "a"), "B"]))]
)


def test_unstack_mixed_type_name_in_multiindex(
unstack_idx, expected_values, expected_index, expected_columns
):
@@ -127,9 +99,5 @@ def test_unstack_multi_index_categorical_values():

dti = ser.index.levels[0]
c = pd.Categorical(["foo"] * len(dti))
expected = DataFrame(
{"A": c.copy(), "B": c.copy(), "C": c.copy(), "D": c.copy()},
columns=pd.Index(list("ABCD"), name="minor"),
index=dti.rename("major"),
)
expected = DataFrame({"A": c.copy(), "B": c.copy(), "C": c.copy(), "D": c.copy()}, columns=pd.Index(list("ABCD"), name="minor"), index=dti.rename("major"))
tm.assert_frame_equal(result, expected)
17 changes: 3 additions & 14 deletions pandas/tests/series/test_cumulative.py
@@ -38,10 +38,7 @@ def test_cumprod(self, datetime_series):
_check_accum_op("cumprod", datetime_series)

def test_cummin(self, datetime_series):
tm.assert_numpy_array_equal(
datetime_series.cummin().values,
np.minimum.accumulate(np.array(datetime_series)),
)
tm.assert_numpy_array_equal(datetime_series.cummin().values, np.minimum.accumulate(np.array(datetime_series)))
ts = datetime_series.copy()
ts[::2] = np.NaN
result = ts.cummin()[1::2]
@@ -51,10 +48,7 @@ def test_cummin(self, datetime_series):
tm.assert_series_equal(result, expected)

def test_cummax(self, datetime_series):
tm.assert_numpy_array_equal(
datetime_series.cummax().values,
np.maximum.accumulate(np.array(datetime_series)),
)
tm.assert_numpy_array_equal(datetime_series.cummax().values, np.maximum.accumulate(np.array(datetime_series)))
ts = datetime_series.copy()
ts[::2] = np.NaN
result = ts.cummax()[1::2]
@@ -148,12 +142,7 @@ def test_cummethods_bool(self):
b = ~a
c = pd.Series([False] * len(b))
d = ~c
methods = {
"cumsum": np.cumsum,
"cumprod": np.cumprod,
"cummin": np.minimum.accumulate,
"cummax": np.maximum.accumulate,
}
methods = {"cumsum": np.cumsum, "cumprod": np.cumprod, "cummin": np.minimum.accumulate, "cummax": np.maximum.accumulate}
args = product((a, b, c, d), methods)
for s, method in args:
expected = pd.Series(methods[method](s.values))
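As an aside, candidates for this kind of cleanup can be located mechanically: look for a line that ends with a comma immediately followed by a line whose first non-blank character is a closing bracket. The helper below is only an illustrative sketch (the script name and invocation are hypothetical, and it is not the tooling used for this PR); it will also flag trailing commas that are deliberate, so each hit still needs review.

# find_trailing_commas.py -- hypothetical helper, not part of pandas.
# Prints lines that end with a comma and are immediately followed by a line
# whose first non-whitespace character is a closing bracket.
import sys
from pathlib import Path


def scan(path: Path) -> None:
    lines = path.read_text().splitlines()
    for i in range(len(lines) - 1):
        current = lines[i].rstrip()
        following = lines[i + 1].lstrip()
        if current.endswith(",") and following[:1] in (")", "]", "}"):
            print(f"{path}:{i + 1}: {current.strip()}")


if __name__ == "__main__":
    for name in sys.argv[1:]:
        scan(Path(name))

Example invocation (hypothetical): python find_trailing_commas.py pandas/tests/series/methods/test_interpolate.py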