Skip to content

Commit

Permalink
fix(duckdb): handle null-typed scalars in to_pyarrow()
Browse files Browse the repository at this point in the history
  • Loading branch information
NickCrews authored and cpcloud committed Dec 31, 2024
1 parent 9f94ad4 commit c34c5ac
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 4 deletions.
6 changes: 6 additions & 0 deletions ibis/backends/duckdb/converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,12 @@ def convert_Array(s, dtype, pandas_type):


class DuckDBPyArrowData(PyArrowData):
@classmethod
def convert_scalar(cls, scalar: pa.Scalar, dtype: dt.DataType) -> pa.Scalar:
    """Convert a pyarrow scalar according to the ibis ``dtype``.

    NULL-typed values are special-cased: instead of delegating to the
    base class, a plain untyped pyarrow null scalar is produced.
    """
    if not dtype.is_null():
        return super().convert_scalar(scalar, dtype)
    return pa.scalar(None)

@classmethod
def convert_column(cls, column: pa.Array, dtype: dt.DataType) -> pa.Array:
if dtype.is_null():
Expand Down
24 changes: 20 additions & 4 deletions ibis/backends/tests/test_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -624,8 +624,7 @@ def test_scalar_to_memory(limit, awards_players, output_format, converter):
assert converter(res) is None


# flink
@pytest.mark.notyet(
mark_notyet_nulls = pytest.mark.notyet(
[
"clickhouse",
"exasol",
Expand All @@ -639,9 +638,26 @@ def test_scalar_to_memory(limit, awards_players, output_format, converter):
"trino",
],
raises=com.IbisTypeError,
reason="unable to handle null typed columns as input",
reason="unable to handle null types as input",
)
def test_all_null_column(con):


@mark_notyet_nulls
def test_all_null_table(con):
    """A one-column table holding only NULLs converts to a null-typed pyarrow column."""
    table = ibis.memtable({"a": [None]})
    arrow_table = con.to_pyarrow(table)
    assert pat.is_null(arrow_table["a"].type)


@mark_notyet_nulls
def test_all_null_column(con):
    """A single all-NULL column expression converts to a null-typed pyarrow array."""
    table = ibis.memtable({"a": [None]})
    arrow_column = con.to_pyarrow(table.a)
    assert pat.is_null(arrow_column.type)


@mark_notyet_nulls
def test_all_null_scalar(con):
    """A NULL literal converts to a null-typed pyarrow scalar."""
    null_literal = ibis.literal(None)
    arrow_scalar = con.to_pyarrow(null_literal)
    assert pat.is_null(arrow_scalar.type)

0 comments on commit c34c5ac

Please sign in to comment.