chore(deps): bump flakes #10571

Merged
merged 1 commit on Dec 16, 2024
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -105,6 +105,7 @@ repos:
name: shellcheck
language: system
entry: shellcheck
args: ["--shell", "bash"]
types_or:
- sh
- shell
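Note: the new --shell bash argument pins shellcheck's dialect instead of relying on shebang detection, so every file matched by the sh/shell types is linted as bash. The hook then effectively runs (path is illustrative):

    shellcheck --shell bash path/to/script.sh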
51 changes: 39 additions & 12 deletions flake.lock

Some generated files are not rendered by default.

12 changes: 11 additions & 1 deletion flake.nix
@@ -23,6 +23,15 @@
nixpkgs.follows = "nixpkgs";
};
};

pyproject-build-systems = {
url = "github:pyproject-nix/build-system-pkgs";
inputs = {
pyproject-nix.follows = "pyproject-nix";
uv2nix.follows = "uv2nix";
nixpkgs.follows = "nixpkgs";
};
};
};

outputs =
@@ -32,11 +41,12 @@
, nixpkgs
, pyproject-nix
, uv2nix
, pyproject-build-systems
, ...
}: {
overlays.default = nixpkgs.lib.composeManyExtensions [
gitignore.overlay
(import ./nix/overlay.nix { inherit uv2nix pyproject-nix; })
(import ./nix/overlay.nix { inherit uv2nix pyproject-nix pyproject-build-systems; })
];
} // flake-utils.lib.eachDefaultSystem (
localSystem:
8 changes: 4 additions & 4 deletions ibis/backends/impala/tests/test_exprs.py
@@ -132,7 +132,7 @@ def test_builtins(con, alltypes):
s.repeat(i1),
]

proj_exprs = [expr.name("e%d" % i) for i, expr in enumerate(exprs)]
proj_exprs = [expr.name(f"e{i:d}") for i, expr in enumerate(exprs)]

projection = table.select(proj_exprs)
projection.limit(10).execute()
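Aside: the %-format to f-string rewrites in this file are behavior-preserving for integer loop indices; a quick sanity check (illustrative):

    for i in range(3):
        assert "e%d" % i == f"e{i:d}"  # both yield "e0", "e1", "e2"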
@@ -418,7 +418,7 @@ def test_decimal_timestamp_builtins(con):
exprs.append(ts + offset)
exprs.append(ts - offset)

proj_exprs = [expr.name("e%d" % i) for i, expr in enumerate(exprs)]
proj_exprs = [expr.name(f"e{i:d}") for i, expr in enumerate(exprs)]

projection = table.select(proj_exprs).limit(10)
projection.execute()
@@ -474,7 +474,7 @@ def test_aggregations(alltypes):
d.var(where=cond),
]

metrics = [expr.name("e%d" % i) for i, expr in enumerate(exprs)]
metrics = [expr.name(f"e{i:d}") for i, expr in enumerate(exprs)]

agged_table = table.aggregate(metrics)
agged_table.execute()
@@ -511,7 +511,7 @@ def test_analytic_functions(alltypes):
f.max(),
]

proj_exprs = [expr.name("e%d" % i) for i, expr in enumerate(exprs)]
proj_exprs = [expr.name(f"e{i:d}") for i, expr in enumerate(exprs)]

proj_table = g.mutate(proj_exprs)
proj_table.execute()
2 changes: 1 addition & 1 deletion ibis/backends/impala/udf.py
@@ -21,7 +21,7 @@
import ibis.expr.operations as ops
from ibis import util

__all__ = ["scalar_function", "aggregate_function", "wrap_udf", "wrap_uda"]
__all__ = ["aggregate_function", "scalar_function", "wrap_uda", "wrap_udf"]


class Function(abc.ABC):
2 changes: 1 addition & 1 deletion ibis/backends/oracle/tests/test_client.py
@@ -1,6 +1,6 @@
from __future__ import annotations

from datetime import date # noqa: TCH003
from datetime import date # noqa: TC003

import oracledb
import pandas as pd
2 changes: 1 addition & 1 deletion ibis/backends/polars/rewrites.py
@@ -7,7 +7,7 @@
from ibis.common.annotations import attribute
from ibis.common.collections import FrozenDict
from ibis.common.patterns import replace
from ibis.common.typing import VarTuple # noqa: TCH001
from ibis.common.typing import VarTuple # noqa: TC001
from ibis.expr.schema import Schema


4 changes: 2 additions & 2 deletions ibis/backends/sql/compilers/__init__.py
@@ -3,8 +3,8 @@
__all__ = [
"BigQueryCompiler",
"ClickHouseCompiler",
"DatabricksCompiler",
"DataFusionCompiler",
"DatabricksCompiler",
"DruidCompiler",
"DuckDBCompiler",
"ExasolCompiler",
@@ -16,8 +16,8 @@
"PostgresCompiler",
"PySparkCompiler",
"RisingWaveCompiler",
"SnowflakeCompiler",
"SQLiteCompiler",
"SnowflakeCompiler",
"TrinoCompiler",
]

4 changes: 2 additions & 2 deletions ibis/backends/sql/compilers/base.py
@@ -80,7 +80,7 @@ class AggGen:
class _Accessor:
"""An internal type to handle getattr/getitem access."""

__slots__ = ("handler", "compiler")
__slots__ = ("compiler", "handler")

def __init__(self, handler: Callable, compiler: SQLGlotCompiler):
self.handler = handler
@@ -175,7 +175,7 @@ def __getitem__(self, key: str) -> Callable[..., sge.Anonymous]:


class FuncGen:
__slots__ = ("dialect", "namespace", "anon", "copy")
__slots__ = ("anon", "copy", "dialect", "namespace")

def __init__(
self, *, dialect: sg.Dialect, namespace: str | None = None, copy: bool = False
4 changes: 2 additions & 2 deletions ibis/backends/sql/rewrites.py
@@ -14,11 +14,11 @@
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
from ibis.common.annotations import attribute
from ibis.common.collections import FrozenDict # noqa: TCH001
from ibis.common.collections import FrozenDict # noqa: TC001
from ibis.common.deferred import var
from ibis.common.graph import Graph
from ibis.common.patterns import InstanceOf, Object, Pattern, replace
from ibis.common.typing import VarTuple # noqa: TCH001
from ibis.common.typing import VarTuple # noqa: TC001
from ibis.expr.rewrites import d, p, replace_parameter
from ibis.expr.schema import Schema

19 changes: 13 additions & 6 deletions ibis/backends/tests/test_aggregation.py
@@ -470,12 +470,19 @@ def mean_and_std(v):
lambda t, where: t.string_col.approx_nunique(where=where),
lambda t, where: t.string_col[where].nunique(),
id="approx_nunique",
marks=pytest.mark.xfail_version(
duckdb=["duckdb>=1.1"],
raises=AssertionError,
reason="not exact, even at this tiny scale",
strict=False,
),
marks=[
pytest.mark.xfail_version(
duckdb=["duckdb>=1.1"],
raises=AssertionError,
reason="not exact, even at this tiny scale",
strict=False,
),
pytest.mark.notimpl(
["datafusion"],
reason="data type is not supported",
raises=Exception,
),
],
),
param(
lambda t, where: t.bigint_col.bit_and(where=where),
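Context for the approx_nunique change above: pytest.param accepts either a single mark or a list for marks=, so the existing xfail_version mark is wrapped in a list to make room for the added datafusion notimpl mark. A minimal sketch of the pattern (values are hypothetical; notimpl/xfail_version are ibis's custom markers):

    import pytest

    @pytest.mark.parametrize(
        "x",
        [
            pytest.param(1, marks=pytest.mark.xfail),                      # single mark
            pytest.param(2, marks=[pytest.mark.xfail, pytest.mark.skip]),  # list of marks
        ],
    )
    def test_demo(x):
        assert x in (1, 2)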
4 changes: 3 additions & 1 deletion ibis/backends/tests/test_examples.py
@@ -16,7 +16,9 @@
reason="nix on linux cannot download duckdb extensions or data due to sandboxing",
)
@pytest.mark.notimpl(["pyspark", "exasol", "databricks"])
@pytest.mark.notyet(["clickhouse", "druid", "impala", "mssql", "trino", "risingwave"])
@pytest.mark.notyet(
["clickhouse", "druid", "impala", "mssql", "trino", "risingwave", "datafusion"]
)
@pytest.mark.parametrize(
("example", "columns"),
[
7 changes: 4 additions & 3 deletions ibis/backends/tests/test_expr_caching.py
@@ -55,7 +55,7 @@ def test_persist_expression_contextmanager(backend, con, alltypes):
assert non_cached_table.op() not in con._cache_op_to_entry


@mark.notimpl(["datafusion", "flink", "impala", "trino", "druid"])
@mark.notimpl(["flink", "impala", "trino", "druid"])
@pytest.mark.never(
["risingwave"],
raises=com.UnsupportedOperationError,
@@ -83,6 +83,7 @@ def test_persist_expression_multiple_refs(backend, con, alltypes):
backend.assert_frame_equal(
non_cached_table.order_by("id").to_pandas(),
cached_table.order_by("id").to_pandas(),
check_dtype=False,
)

name = cached_table.op().name
@@ -105,7 +106,7 @@ def test_persist_expression_multiple_refs(backend, con, alltypes):
assert name not in con.list_tables()


@mark.notimpl(["datafusion", "flink", "impala", "trino", "druid"])
@mark.notimpl(["flink", "impala", "trino", "druid"])
@mark.notimpl(["exasol"], reason="Exasol does not support temporary tables")
@pytest.mark.never(
["risingwave"],
@@ -127,7 +128,7 @@ def test_persist_expression_repeated_cache(alltypes, con):
assert name not in con.list_tables()


@mark.notimpl(["datafusion", "flink", "impala", "trino", "druid"])
@mark.notimpl(["flink", "impala", "trino", "druid"])
@mark.notimpl(["exasol"], reason="Exasol does not support temporary tables")
@pytest.mark.never(
["risingwave"],
5 changes: 0 additions & 5 deletions ibis/backends/tests/test_temporal.py
@@ -781,11 +781,6 @@ def convert_to_offset(x):
raises=AssertionError,
reason="duckdb returns dateoffsets",
),
pytest.mark.notimpl(
["trino"],
raises=AssertionError,
reason="doesn't match pandas results, unclear what the issue is, perhaps timezones",
),
pytest.mark.notimpl(
["flink"],
raises=Py4JJavaError,
9 changes: 1 addition & 8 deletions ibis/backends/tests/tpc/ds/test_queries.py
@@ -2072,11 +2072,6 @@ def test_40(catalog_sales, catalog_returns, warehouse, item, date_dim):


@tpc_test("ds")
@pytest.mark.notyet(
["datafusion"],
raises=Exception,
reason="Error during planning: Correlated column is not allowed in predicate",
)
@pytest.mark.notyet(
["clickhouse"],
raises=ClickHouseDatabaseError,
@@ -2230,9 +2225,6 @@ def test_44(store_sales, item):


@tpc_test("ds")
@pytest.mark.notyet(
["datafusion"], raises=Exception, reason="Unsupported feature in DataFusion SQL"
)
def test_45(web_sales, customer, customer_address, date_dim, item):
return (
web_sales.join(customer, [("ws_bill_customer_sk", "c_customer_sk")])
@@ -4647,6 +4639,7 @@ def items(returns, *, prefix):


@tpc_test("ds")
@pytest.mark.notyet(["datafusion"], reason="coercion failure", raises=Exception)
def test_84(
customer,
customer_address,
24 changes: 13 additions & 11 deletions ibis/backends/trino/converter.py
@@ -9,16 +9,18 @@
@classmethod
def convert_Interval(cls, s, dtype, pandas_dtype):
def parse_trino_timedelta(value):
# format is 'days hour:minute:second.millisecond'
days, rest = value.split(" ", 1)
hms, millis = rest.split(".", 1)
hours, minutes, seconds = hms.split(":")
return datetime.timedelta(
days=int(days),
hours=int(hours),
minutes=int(minutes),
seconds=int(seconds),
milliseconds=int(millis),
)
if isinstance(value, str):
# format is 'days hour:minute:second.millisecond'
days, rest = value.split(" ", 1)
hms, millis = rest.split(".", 1)
hours, minutes, seconds = hms.split(":")
return datetime.timedelta(
days=int(days),
hours=int(hours),
minutes=int(minutes),
seconds=int(seconds),
milliseconds=int(millis),
)
return value

return s.map(parse_trino_timedelta, na_action="ignore")
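For reference, the new guarded parser as a standalone sketch with a worked example (the sample value is illustrative, following the 'days hour:minute:second.millisecond' format noted in the inline comment):

    import datetime

    def parse_trino_timedelta(value):
        # non-string values (e.g. already-converted timedeltas) pass through unchanged
        if isinstance(value, str):
            days, rest = value.split(" ", 1)
            hms, millis = rest.split(".", 1)
            hours, minutes, seconds = hms.split(":")
            return datetime.timedelta(
                days=int(days),
                hours=int(hours),
                minutes=int(minutes),
                seconds=int(seconds),
                milliseconds=int(millis),
            )
        return value

    assert parse_trino_timedelta("1 02:03:04.500") == datetime.timedelta(
        days=1, hours=2, minutes=3, seconds=4, milliseconds=500
    )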