Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

refactor(api): re-enable deprecation checks #10596

Merged
merged 6 commits into from
Dec 19, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions .releaserc.js
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,7 @@ module.exports = {
{
verifyConditionsCmd:
"ci/release/verify_conditions.sh ${options.dryRun}",
// TODO(cpcloud): re-enable once deprecation removals for 10.0 are merged
// verifyReleaseCmd: "ci/release/verify_release.sh ${nextRelease.version}",
verifyReleaseCmd: "ci/release/verify_release.sh ${nextRelease.version}",
prepareCmd: "ci/release/prepare.sh ${nextRelease.version}",
publishCmd: "ci/release/publish.sh"
}
Expand Down
9 changes: 8 additions & 1 deletion flake.nix
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,14 @@

release = pkgs.mkShell {
name = "release";
packages = with pkgs; [ git uv nodejs_20 unzip gnugrep python3 ];
packages = with pkgs; [
git
uv
nodejs_20
unzip
gnugrep
(python3.withPackages (p: [ p.packaging ]))
];
};
};
}
Expand Down
45 changes: 4 additions & 41 deletions ibis/backends/duckdb/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -808,43 +808,6 @@ def _read_parquet_pyarrow_dataset(
# by the time we execute against this so we register it
# explicitly.

@util.deprecated(
    instead="Pass in-memory data to `memtable` instead.",
    as_of="9.1",
    removed_in="10.0",
)
def read_in_memory(
    self,
    source: pd.DataFrame
    | pa.Table
    | pa.RecordBatchReader
    | pl.DataFrame
    | pl.LazyFrame,
    table_name: str | None = None,
) -> ir.Table:
    """Register an in-memory tabular object as a table in the current database.

    Accepts a pandas DataFrame, a Polars DataFrame or LazyFrame, or a
    PyArrow Table or RecordBatchReader.

    Parameters
    ----------
    source
        The in-memory data to register.
    table_name
        Name to give the created table. When omitted, a sequentially
        generated name is used.

    Returns
    -------
    ir.Table
        An expression referring to the just-registered table.

    """
    # Fall back to a generated name when the caller did not supply one.
    if not table_name:
        table_name = util.gen_name("read_in_memory")
    # Dispatch on the concrete type of `source`; registration happens as a
    # side effect on this backend's connection.
    _read_in_memory(source, table_name, self)
    return self.table(table_name)

def read_delta(
self,
source_table: str,
Expand Down Expand Up @@ -1367,7 +1330,7 @@ def to_torch(
*,
params: Mapping[ir.Scalar, Any] | None = None,
limit: int | str | None = None,
**kwargs: Any,
**_: Any,
) -> dict[str, torch.Tensor]:
"""Execute an expression and return results as a dictionary of torch tensors.

Expand Down Expand Up @@ -1678,7 +1641,7 @@ def _create_temp_view(self, table_name, source):


@lazy_singledispatch
def _read_in_memory(source: Any, table_name: str, _conn: Backend, **kwargs: Any):
def _read_in_memory(source: Any, table_name: str, _conn: Backend, **_: Any):
raise NotImplementedError(
f"The `{_conn.name}` backend currently does not support "
f"reading data of {type(source)!r}"
Expand All @@ -1690,12 +1653,12 @@ def _read_in_memory(source: Any, table_name: str, _conn: Backend, **kwargs: Any)
@_read_in_memory.register("pyarrow.Table")
@_read_in_memory.register("pandas.DataFrame")
@_read_in_memory.register("pyarrow.dataset.Dataset")
def _default(source, table_name, _conn, **kwargs: Any):
def _default(source, table_name, _conn, **_: Any):
_conn.con.register(table_name, source)


@_read_in_memory.register("pyarrow.RecordBatchReader")
def _pyarrow_rbr(source, table_name, _conn, **kwargs: Any):
def _pyarrow_rbr(source, table_name, _conn, **_: Any):
_conn.con.register(table_name, source)
# Ensure the reader isn't marked as started, in case the name is
# being overwritten.
Expand Down
48 changes: 10 additions & 38 deletions ibis/backends/duckdb/tests/test_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,21 +275,6 @@ def test_attach_sqlite(data_dir, tmp_path):
assert dt.String(nullable=True) in set(types)


def test_re_read_in_memory_overwrite(con):
    """Re-registering over an existing table name replaces its schema."""
    first = pd.DataFrame({"a": ["a"], "b": [1], "d": ["hi"]})
    second = pd.DataFrame({"a": [1], "c": [1.4]})

    # Initial registration under the name "df" (deprecated API warns).
    with pytest.warns(FutureWarning, match="memtable"):
        table = con.read_in_memory(first, table_name="df")
    assert len(table.columns) == 3
    assert table.schema() == ibis.schema([("a", "str"), ("b", "int"), ("d", "str")])

    # Registering different data under the same name overwrites the table.
    with pytest.warns(FutureWarning, match="memtable"):
        table = con.read_in_memory(second, table_name="df")
    assert len(table.columns) == 2
    assert table.schema() == ibis.schema([("a", "int"), ("c", "float")])


def test_memtable_with_nullable_dtypes(con):
data = pd.DataFrame(
{
Expand Down Expand Up @@ -381,37 +366,24 @@ def test_s3_403_fallback(con, httpserver, monkeypatch):

def test_register_numpy_str(con):
data = pd.DataFrame({"a": [np.str_("xyz"), None]})
with pytest.warns(FutureWarning, match="memtable"):
result = con.read_in_memory(data)
tm.assert_frame_equal(result.execute(), data)
result = ibis.memtable(data)
tm.assert_frame_equal(con.execute(result), data)


def test_register_recordbatchreader_warns(con):
def test_memtable_recordbatchreader_raises(con):
table = pa.Table.from_batches(
[
pa.RecordBatch.from_pydict({"x": [1, 2]}),
pa.RecordBatch.from_pydict({"x": [3, 4]}),
]
map(pa.RecordBatch.from_pydict, [{"x": [1, 2]}, {"x": [3, 4]}])
)
reader = table.to_reader()
sol = table.to_pandas()
with pytest.warns(FutureWarning, match="memtable"):
t = con.read_in_memory(reader)

# First execute is fine
res = t.execute()
tm.assert_frame_equal(res, sol)
with pytest.raises(TypeError):
ibis.memtable(reader)

# Later executes warn
with pytest.warns(UserWarning, match="RecordBatchReader"):
t.limit(2).execute()
t = ibis.memtable(reader.read_all())

# Re-registering over the name with a new reader is fine
reader = table.to_reader()
with pytest.warns(FutureWarning, match="memtable"):
t = con.read_in_memory(reader, table_name=t.get_name())
res = t.execute()
tm.assert_frame_equal(res, sol)
# First execute is fine
res = con.execute(t)
tm.assert_frame_equal(res, table.to_pandas())


def test_csv_with_slash_n_null(con, tmp_path):
Expand Down
15 changes: 2 additions & 13 deletions ibis/expr/types/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from ibis.common.grounds import Singleton
from ibis.expr.rewrites import rewrite_window_input
from ibis.expr.types.core import Expr, _binop, _FixedTextJupyterMixin, _is_null_literal
from ibis.util import deprecated, promote_list, warn_deprecated
from ibis.util import deprecated, promote_list

if TYPE_CHECKING:
import pandas as pd
Expand Down Expand Up @@ -2142,9 +2142,7 @@ def topk(

return table.aggregate(metric, by=[self]).order_by(metric.desc()).limit(k)

def arbitrary(
self, where: ir.BooleanValue | None = None, how: Any = None
) -> Scalar:
def arbitrary(self, where: ir.BooleanValue | None = None) -> Scalar:
"""Select an arbitrary value in a column.

Returns an arbitrary (nondeterministic, backend-specific) value from
Expand All @@ -2155,8 +2153,6 @@ def arbitrary(
----------
where
A filter expression
how
DEPRECATED

Returns
-------
Expand Down Expand Up @@ -2188,13 +2184,6 @@ def arbitrary(
│ 2 │ a │ 8.3 │
└───────┴────────┴─────────┘
"""
if how is not None:
warn_deprecated(
name="how",
as_of="9.0",
removed_in="10.0",
instead="call `first` or `last` explicitly",
)
return ops.Arbitrary(self, where=self._bind_to_parent_table(where)).to_expr()

def count(self, where: ir.BooleanValue | None = None) -> ir.IntegerScalar:
Expand Down
7 changes: 0 additions & 7 deletions ibis/expr/types/strings.py
Original file line number Diff line number Diff line change
Expand Up @@ -410,13 +410,6 @@ def capitalize(self) -> StringValue:
"""
return ops.Capitalize(self).to_expr()

@util.deprecated(
    instead="use the `capitalize` method", as_of="9.0", removed_in="10.0"
)
def initcap(self) -> StringValue:
    """Deprecated alias for `capitalize`."""
    # Delegate so the shim stays behaviorally identical to `capitalize`.
    return self.capitalize()

def __contains__(self, *_: Any) -> bool:
raise TypeError("Use string_expr.contains(arg)")

Expand Down
22 changes: 8 additions & 14 deletions ibis/tests/expr/test_value_exprs.py
Original file line number Diff line number Diff line change
Expand Up @@ -442,7 +442,7 @@ def test_cast_same_type_noop(table):
assert i.cast("int8") is i


def test_string_slice_step(table):
def test_string_slice_step():
s = ibis.literal("abcde")
s[1:3]
s[1:3:1]
Expand Down Expand Up @@ -509,7 +509,7 @@ def test_negate_boolean_column(table):


@pytest.mark.parametrize("column", ["a", "b"])
@pytest.mark.parametrize("condition_fn", [lambda t: None, lambda t: t.a > 8])
@pytest.mark.parametrize("condition_fn", [lambda _: None, lambda t: t.a > 8])
def test_arbitrary(table, column, condition_fn):
col = table[column]
where = condition_fn(table)
Expand All @@ -519,12 +519,6 @@ def test_arbitrary(table, column, condition_fn):
assert isinstance(expr.op(), ops.Arbitrary)


def test_arbitrary_how_deprecated(table):
    """Passing `how=` to `arbitrary` warns but still builds an Arbitrary op."""
    with pytest.warns(FutureWarning, match="v9.0"):
        expr = table.a.arbitrary(how="last")
    assert isinstance(expr.op(), ops.Arbitrary)


@pytest.mark.parametrize(
["column", "operation"],
[
Expand Down Expand Up @@ -803,12 +797,12 @@ def test_literal_promotions(table, op, name, case, ex_type):
@pytest.mark.parametrize(
("op", "left_fn", "right_fn", "ex_type"),
[
(operator.sub, lambda t: t["a"], lambda t: 0, "int8"),
(operator.sub, lambda t: 0, lambda t: t["a"], "int16"),
(operator.sub, lambda t: t["b"], lambda t: 0, "int16"),
(operator.sub, lambda t: 0, lambda t: t["b"], "int32"),
(operator.sub, lambda t: t["c"], lambda t: 0, "int32"),
(operator.sub, lambda t: 0, lambda t: t["c"], "int64"),
(operator.sub, lambda t: t["a"], lambda _: 0, "int8"),
(operator.sub, lambda _: 0, lambda t: t["a"], "int16"),
(operator.sub, lambda t: t["b"], lambda _: 0, "int16"),
(operator.sub, lambda _: 0, lambda t: t["b"], "int32"),
(operator.sub, lambda t: t["c"], lambda _: 0, "int32"),
(operator.sub, lambda _: 0, lambda t: t["c"], "int64"),
],
ids=lambda arg: str(getattr(arg, "__name__", arg)),
)
Expand Down
Loading