Skip to content

Commit

Permalink
Update tools used in pre-commit testing. (#1741)
Browse files Browse the repository at this point in the history
This updates us to the latest versions of Black and Ruff, which results
in some minor non-behavioral changes to the code.
  • Loading branch information
thetorpedodog authored Sep 29, 2023
1 parent d5dee48 commit 649bf14
Show file tree
Hide file tree
Showing 14 changed files with 12 additions and 33 deletions.
12 changes: 6 additions & 6 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
exclude: ^doc/source/
repos:
- repo: https://github.com/ambv/black
rev: 22.12.0
- repo: https://github.com/psf/black
rev: "23.9.1"
hooks:
- id: black
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.226
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.291
hooks:
- id: ruff
args: ["--force-exclude", "--config=apis/python/pyproject.toml"]
args: ["--config=apis/python/pyproject.toml"]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.0.0
rev: v1.5.1
hooks:
- id: mypy
additional_dependencies:
Expand Down
4 changes: 2 additions & 2 deletions apis/python/src/tiledbsoma/_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ class _CachedElement:
"""The reified object, if it has been opened."""


class CollectionBase(
class CollectionBase( # type: ignore[misc] # __eq__ false positive
TileDBObject[_tdb_handles.GroupWrapper],
somacore.collection.BaseCollection[CollectionElementType],
):
Expand Down Expand Up @@ -628,7 +628,7 @@ def _check_allows_child(cls, key: str, child_cls: type) -> None:
AnyTileDBCollection = CollectionBase[Any]


class Collection(
class Collection( # type: ignore[misc] # __eq__ false positive
CollectionBase[CollectionElementType], somacore.Collection[CollectionElementType]
):
""":class:`Collection` is a persistent container of named SOMA objects, stored as
Expand Down
2 changes: 1 addition & 1 deletion apis/python/src/tiledbsoma/_experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from ._tiledb_object import AnyTileDBObject


class Experiment(
class Experiment( # type: ignore[misc] # __eq__ false positive
CollectionBase[AnyTileDBObject],
experiment.Experiment[ # type: ignore[type-var]
DataFrame,
Expand Down
2 changes: 1 addition & 1 deletion apis/python/src/tiledbsoma/_measurement.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from ._tiledb_object import AnyTileDBObject


class Measurement(
class Measurement( # type: ignore[misc] # __eq__ false positive
CollectionBase[AnyTileDBObject],
measurement.Measurement[ # type: ignore[type-var]
DataFrame,
Expand Down
1 change: 0 additions & 1 deletion apis/python/src/tiledbsoma/io/_registration/signatures.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,6 @@ def from_soma_experiment(
"""

with tiledbsoma.Experiment.open(uri, context=context) as exp:

obs_schema = _string_dict_from_arrow_schema(exp.obs.schema)

var_schema = _string_dict_from_arrow_schema(
Expand Down
7 changes: 1 addition & 6 deletions apis/python/src/tiledbsoma/io/ingest.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@
_NDArr = TypeVar("_NDArr", bound=NDArray)
_TDBO = TypeVar("_TDBO", bound=TileDBObject[RawHandle])


# ----------------------------------------------------------------
class IngestionParams:
"""Maps from user-level ingest modes to a set of implementation-level boolean flags."""
Expand All @@ -93,7 +94,6 @@ def __init__(
ingest_mode: str,
label_mapping: Optional[ExperimentAmbientLabelMapping],
) -> None:

if ingest_mode == "schema_only":
self.write_schema_no_data = True
self.error_if_already_exists = True
Expand Down Expand Up @@ -1125,7 +1125,6 @@ def _write_dataframe(
context: Optional[SOMATileDBContext] = None,
axis_mapping: AxisIDMapping,
) -> DataFrame:

_util.get_start_stamp()
logging.log_io(None, f"START WRITING {df_uri}")

Expand Down Expand Up @@ -1468,7 +1467,6 @@ def _update_dataframe(
with DataFrame.open(
sdf.uri, mode="r", context=context, platform_config=platform_config
) as sdf_r:

# Until we someday support deletes, this is the correct check on the existing,
# contiguous soma join IDs compared to the new contiguous ones about to be created.
old_jids = sorted(
Expand Down Expand Up @@ -1831,7 +1829,6 @@ def _coo_to_table(
axis: int = 0,
base: int = 0,
) -> pa.Table:

soma_dim_0 = mat_coo.row + base if base > 0 and axis == 0 else mat_coo.row
soma_dim_1 = mat_coo.col + base if base > 0 and axis == 1 else mat_coo.col

Expand Down Expand Up @@ -2058,7 +2055,6 @@ def _ingest_uns_dict(
ingestion_params: IngestionParams,
use_relative_uri: Optional[bool],
) -> None:

with _create_or_open_collection(
Collection,
_util.uri_joinpath(parent.uri, parent_key),
Expand Down Expand Up @@ -2092,7 +2088,6 @@ def _ingest_uns_node(
ingestion_params: IngestionParams,
use_relative_uri: Optional[bool],
) -> None:

if isinstance(value, np.generic):
# This is some kind of numpy scalar value. Metadata entries
# only accept native Python types, so unwrap it.
Expand Down
2 changes: 0 additions & 2 deletions apis/python/tests/test_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

# ----------------------------------------------------------------
def create_and_populate_dataframe(path: str) -> soma.DataFrame:

arrow_schema = pa.schema(
[
("foo", pa.int32()),
Expand Down Expand Up @@ -45,7 +44,6 @@ def create_and_populate_sparse_nd_array(path: str) -> soma.SparseNDArray:
with soma.SparseNDArray.create(
path, type=pa.int64(), shape=(nr, nc)
) as sparse_nd_array:

tensor = pa.SparseCOOTensor.from_numpy(
data=np.asarray([7, 8, 9]),
coords=[[0, 1], [2, 3], [3, 4]],
Expand Down
2 changes: 0 additions & 2 deletions apis/python/tests/test_dataframe.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,6 @@ def simple_data_frame(tmp_path):
with soma.DataFrame.create(
tmp_path.as_posix(), schema=schema, index_column_names=index_column_names
) as sdf:

data = {
"index": [0, 1, 2, 3],
"soma_joinid": [10, 11, 12, 13],
Expand Down Expand Up @@ -1039,7 +1038,6 @@ def test_create_platform_config_overrides(
@pytest.mark.parametrize("allows_duplicates", [False, True])
@pytest.mark.parametrize("consolidate", [False, True])
def test_timestamped_ops(tmp_path, allows_duplicates, consolidate):

uri = tmp_path.as_posix()

platform_config = {"tiledb": {"create": {"allows_duplicates": allows_duplicates}}}
Expand Down
2 changes: 0 additions & 2 deletions apis/python/tests/test_dense_nd_array.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,6 @@ def test_dense_nd_array_read_write_tensor(tmp_path, shape: Tuple[int, ...]):

# check multiple read paths
with soma.DenseNDArray.open(tmp_path.as_posix()) as b:

t = b.read((slice(None),) * ndim, result_order="row-major")
assert t.equals(pa.Tensor.from_numpy(data))

Expand Down Expand Up @@ -313,7 +312,6 @@ def test_dense_nd_array_indexing_errors(tmp_path, io):
with soma.DenseNDArray.create(
tmp_path.as_posix(), type=pa.int64(), shape=shape
) as a:

npa = np.random.default_rng().standard_normal(np.prod(shape)).reshape(shape)

write_coords = tuple(slice(0, dim_len) for dim_len in shape)
Expand Down
5 changes: 0 additions & 5 deletions apis/python/tests/test_experiment_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@

# ----------------------------------------------------------------
def create_and_populate_obs(uri: str) -> soma.DataFrame:

obs_arrow_schema = pa.schema(
[
("foo", pa.int32()),
Expand All @@ -21,7 +20,6 @@ def create_and_populate_obs(uri: str) -> soma.DataFrame:

# TODO: indexing option ...
with soma.DataFrame.create(uri, schema=obs_arrow_schema) as obs:

pydict = {}
pydict["soma_joinid"] = [0, 1, 2, 3, 4]
pydict["foo"] = [10, 20, 30, 40, 50]
Expand All @@ -35,7 +33,6 @@ def create_and_populate_obs(uri: str) -> soma.DataFrame:

# ----------------------------------------------------------------
def create_and_populate_var(uri: str) -> soma.DataFrame:

var_arrow_schema = pa.schema(
[
("quux", pa.large_string()),
Expand All @@ -44,7 +41,6 @@ def create_and_populate_var(uri: str) -> soma.DataFrame:
)

with soma.DataFrame.create(uri, schema=var_arrow_schema) as var:

pydict = {}
pydict["soma_joinid"] = [0, 1, 2, 3]
pydict["quux"] = ["zebra", "yak", "xylophone", "wapiti"]
Expand All @@ -70,7 +66,6 @@ def create_and_populate_sparse_nd_array(uri: str) -> soma.SparseNDArray:
with soma.SparseNDArray.create(
uri, type=pa.int64(), shape=[nr, nc]
) as sparse_nd_array:

tensor = pa.SparseCOOTensor.from_numpy(
data=np.asarray([7, 8, 9]),
coords=[[0, 2], [3, 1], [4, 2]],
Expand Down
2 changes: 1 addition & 1 deletion apis/python/tests/test_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ def test_metadata_marshalling_OK(soma_object, test_value):
assert "test_value" in soma_object.metadata

val = soma_object.metadata["test_value"]
if type(test_value) is float and math.isnan(test_value):
if isinstance(test_value, float) and math.isnan(test_value):
# By definition, NaN != NaN, so we can't just compare
assert math.isnan(val)
else:
Expand Down
1 change: 0 additions & 1 deletion apis/python/tests/test_registration_signatures.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@ def test_signature_serdes(canned_h5ad_file, canned_anndata):


def test_compatible(canned_anndata):

# Check that zero inputs result in zero incompatibility
signatures.Signature.check_compatible({})

Expand Down
2 changes: 0 additions & 2 deletions apis/python/tests/test_tiledbobject.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

import tiledbsoma as soma


# Checking that objects _do_ exist is already done (thoroughly) in other tests. Here
# we primarily focus on the negative cases.

Expand Down Expand Up @@ -109,7 +108,6 @@ def test_tiledbobject_exists_cross_types(tmp_path, name1, name2):
assert cls1.exists(uri1)

else:

uri1 = (tmp_path / name1).as_posix()
uri2 = (tmp_path / name2).as_posix()

Expand Down
1 change: 0 additions & 1 deletion apis/python/tests/test_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@


def test_uri_joinpath_file():

assert uri_joinpath("/A/", "B") == "/A/B"
assert uri_joinpath("/A/", "/B") == "/B"
assert uri_joinpath("/A/B", "C") == "/A/B/C"
Expand Down

0 comments on commit 649bf14

Please sign in to comment.