Skip to content

Commit

Permalink
test(snowflake,bigquery): enable xpassing tests and relax read_csv schema assertion from float64 to numeric (#10589)
Browse files Browse the repository at this point in the history
  • Loading branch information
cpcloud authored Dec 17, 2024
1 parent 2d46fbb commit 01e4498
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 48 deletions.
2 changes: 1 addition & 1 deletion flake.nix
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@
ibis312 = mkDevShell pkgs.ibisDevEnv312;
ibis313 = mkDevShell pkgs.ibisDevEnv313;

default = ibis313;
default = ibis312;

preCommit = pkgs.mkShell {
name = "preCommit";
Expand Down
52 changes: 5 additions & 47 deletions ibis/backends/tests/test_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,14 +119,12 @@ def test_read_csv(con, data_dir, in_table_name, num_diamonds):
@pytest.mark.notimpl(["datafusion"])
@pytest.mark.notyet(
[
"bigquery",
"flink",
"impala",
"mssql",
"mysql",
"postgres",
"risingwave",
"snowflake",
"sqlite",
"trino",
"databricks",
Expand All @@ -140,18 +138,7 @@ def test_read_csv_gz(con, data_dir, gzip_csv):


@pytest.mark.notyet(
[
"bigquery",
"flink",
"impala",
"mssql",
"mysql",
"postgres",
"risingwave",
"snowflake",
"sqlite",
"trino",
]
["flink", "impala", "mssql", "mysql", "postgres", "risingwave", "sqlite", "trino"]
)
def test_read_csv_with_dotted_name(con, data_dir, tmp_path):
basename = "foo.bar.baz/diamonds.csv"
Expand All @@ -166,18 +153,7 @@ def test_read_csv_with_dotted_name(con, data_dir, tmp_path):


@pytest.mark.notyet(
[
"bigquery",
"flink",
"impala",
"mssql",
"mysql",
"postgres",
"risingwave",
"snowflake",
"sqlite",
"trino",
]
["flink", "impala", "mssql", "mysql", "postgres", "risingwave", "sqlite", "trino"]
)
def test_read_csv_schema(con, tmp_path):
foo = tmp_path.joinpath("foo.csv")
Expand All @@ -197,21 +173,12 @@ def test_read_csv_schema(con, tmp_path):

assert result_schema.names == ("cola", "colb", "colc")
assert result_schema["cola"].is_integer()
assert result_schema["colb"].is_float64()
assert result_schema["colb"].is_numeric()
assert result_schema["colc"].is_string()


@pytest.mark.notyet(
[
"flink",
"impala",
"mssql",
"mysql",
"postgres",
"risingwave",
"sqlite",
"trino",
]
["flink", "impala", "mssql", "mysql", "postgres", "risingwave", "sqlite", "trino"]
)
def test_read_csv_glob(con, tmp_path, ft_data):
pc = pytest.importorskip("pyarrow.csv")
Expand Down Expand Up @@ -314,16 +281,7 @@ def test_read_parquet_iterator(


@pytest.mark.notyet(
[
"flink",
"impala",
"mssql",
"mysql",
"postgres",
"risingwave",
"sqlite",
"trino",
]
["flink", "impala", "mssql", "mysql", "postgres", "risingwave", "sqlite", "trino"]
)
def test_read_parquet_glob(con, tmp_path, ft_data):
pq = pytest.importorskip("pyarrow.parquet")
Expand Down

0 comments on commit 01e4498

Please sign in to comment.