diff --git a/apis/python/src/tiledbsoma/_collection.py b/apis/python/src/tiledbsoma/_collection.py
index c7eac3e6f3..cb2cd1e200 100644
--- a/apis/python/src/tiledbsoma/_collection.py
+++ b/apis/python/src/tiledbsoma/_collection.py
@@ -13,7 +13,6 @@
     Callable,
     ClassVar,
     Dict,
-    Optional,
     Tuple,
     Type,
     TypeVar,
@@ -67,9 +66,9 @@ def create(
         cls,
         uri: str,
         *,
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> Self:
         """Creates and opens a new SOMA collection in storage.
 
@@ -147,8 +146,8 @@ def add_new_collection(
         key: str,
         kind: None = None,
         *,
-        uri: Optional[str] = ...,
-        platform_config: Optional[options.PlatformConfig] = ...,
+        uri: str | None = ...,
+        platform_config: options.PlatformConfig | None = ...,
     ) -> "Collection[AnySOMAObject]": ...
 
     @overload
@@ -157,17 +156,17 @@ def add_new_collection(
         key: str,
         kind: Type[_Coll],
         *,
-        uri: Optional[str] = ...,
-        platform_config: Optional[options.PlatformConfig] = ...,
+        uri: str | None = ...,
+        platform_config: options.PlatformConfig | None = ...,
     ) -> _Coll: ...
 
     def add_new_collection(
         self,
         key: str,
-        kind: Optional[Type[CollectionBase]] = None,  # type: ignore[type-arg]
+        kind: Type[CollectionBase] | None = None,  # type: ignore[type-arg]
         *,
-        uri: Optional[str] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        uri: str | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> AnyTileDBCollection:
         """Adds a new sub-collection to this collection.
 
@@ -226,7 +225,7 @@ def add_new_collection(
         DataFrame.create, exclude=("context", "tiledb_timestamp")
     )
     def add_new_dataframe(
-        self, key: str, *, uri: Optional[str] = None, **kwargs: Any
+        self, key: str, *, uri: str | None = None, **kwargs: Any
     ) -> DataFrame:
         """Adds a new DataFrame to this collection.
 
@@ -269,7 +268,7 @@ def add_new_dataframe(
 
     @_funcs.forwards_kwargs_to(NDArray.create, exclude=("context", "tiledb_timestamp"))
     def _add_new_ndarray(
-        self, cls: Type[_NDArr], key: str, *, uri: Optional[str] = None, **kwargs: Any
+        self, cls: Type[_NDArr], key: str, *, uri: str | None = None, **kwargs: Any
     ) -> _NDArr:
         """Internal implementation of common NDArray-adding operations."""
         return self._add_new_element(
@@ -361,7 +360,7 @@ def _add_new_element(
         key: str,
         kind: Type[_TDBO],
         factory: Callable[[str], _TDBO],
-        user_uri: Optional[str],
+        user_uri: str | None,
     ) -> _TDBO:
         """Handles the common parts of adding new elements.
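[Reviewer note] The `X | None` spelling used throughout this patch is PEP 604 syntax, which only exists at runtime from Python 3.10 on; in annotations it is safe on older interpreters only while `from __future__ import annotations` (PEP 563) is in effect, which each touched module adds below where it is not already present (no import is added to _collection.py here, so presumably it already has one). A minimal standalone sketch of the mechanism -- `open_at` is a made-up function, not part of this API:

    from __future__ import annotations  # all annotations become strings (PEP 563)


    def open_at(uri: str, timestamp: int | None = None) -> str | None:
        # The `int | None` above is never evaluated at definition time, so this
        # module imports cleanly on Python 3.8/3.9, where the same expression
        # evaluated at runtime would raise TypeError.
        return uri if timestamp is None else f"{uri}@{timestamp}"


    print(open_at.__annotations__)  # values are plain strings, e.g. 'int | None'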
diff --git a/apis/python/src/tiledbsoma/_common_nd_array.py b/apis/python/src/tiledbsoma/_common_nd_array.py
index 9b28ce5b3c..a96996509d 100644
--- a/apis/python/src/tiledbsoma/_common_nd_array.py
+++ b/apis/python/src/tiledbsoma/_common_nd_array.py
@@ -5,7 +5,9 @@
 
 """Common code shared by both NDArray implementations."""
 
-from typing import Optional, Sequence, Tuple, Union, cast
+from __future__ import annotations
+
+from typing import Sequence, Tuple, Union, cast
 
 import pyarrow as pa
 import somacore
@@ -32,9 +34,9 @@ def create(
         *,
         type: pa.DataType,
         shape: Sequence[Union[int, None]],
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> Self:
         """Creates a SOMA ``NDArray`` at the given URI.
 
@@ -154,7 +156,7 @@ def tiledbsoma_has_upgraded_shape(self) -> bool:
     def _dim_capacity_and_extent(
         cls,
         dim_name: str,
-        dim_shape: Optional[int],
+        dim_shape: int | None,
         ndim: int,
         create_options: TileDBCreateOptions,
     ) -> Tuple[int, int]:
diff --git a/apis/python/src/tiledbsoma/_dataframe.py b/apis/python/src/tiledbsoma/_dataframe.py
index 973b3e01d7..50721fd1ec 100644
--- a/apis/python/src/tiledbsoma/_dataframe.py
+++ b/apis/python/src/tiledbsoma/_dataframe.py
@@ -6,12 +6,14 @@
 """
 Implementation of a SOMA DataFrame
 """
+
+from __future__ import annotations
+
 import inspect
 from typing import (
     Any,
     Dict,
     List,
-    Optional,
     Sequence,
     Tuple,
     Union,
@@ -151,10 +153,10 @@ def create(
         *,
         schema: pa.Schema,
         index_column_names: Sequence[str] = (SOMA_JOINID,),
-        domain: Optional[Domain] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        domain: Domain | None = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> "DataFrame":
         """Creates the data structure on disk/S3/cloud.
 
@@ -407,7 +409,7 @@ def count(self) -> int:
         return cast(DataFrameWrapper, self._handle).count
 
     @property
-    def _maybe_soma_joinid_shape(self) -> Optional[int]:
+    def _maybe_soma_joinid_shape(self) -> int | None:
         """An internal helper method that returns the shape
         value along the ``soma_joinid`` index column, if the
         ``DataFrame`` has one, else ``None``.
@@ -419,7 +421,7 @@ def _maybe_soma_joinid_shape(self) -> Optional[int]:
         return self._handle.maybe_soma_joinid_shape
 
     @property
-    def _maybe_soma_joinid_maxshape(self) -> Optional[int]:
+    def _maybe_soma_joinid_maxshape(self) -> int | None:
         """An internal helper method that returns the maxshape
         value along the ``soma_joinid`` index column, if the
         ``DataFrame`` has one, else ``None``.
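[Reviewer note] Worth stating once: the rename is purely cosmetic to a type checker. A quick illustrative check, standalone and not part of the patch:

    from typing import Optional, Union

    # `Optional[X]` has always been shorthand for `Union[X, None]`, which is
    # exactly what the annotation `X | None` denotes, so neither runtime nor
    # type-checking behavior changes with this migration.
    assert Optional[int] == Union[int, None]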
@@ -657,13 +659,13 @@ def __len__(self) -> int:
     def read(
         self,
         coords: options.SparseDFCoords = (),
-        column_names: Optional[Sequence[str]] = None,
+        column_names: Sequence[str] | None = None,
         *,
         result_order: options.ResultOrderStr = options.ResultOrder.AUTO,
-        value_filter: Optional[str] = None,
+        value_filter: str | None = None,
         batch_size: options.BatchSize = _UNBATCHED,
-        partitions: Optional[options.ReadPartitions] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        partitions: options.ReadPartitions | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> TableReadIter:
         """Reads a user-defined subset of data, addressed by the dataframe indexing
         columns, optionally filtered, and return results as one or more `Arrow tables `_.
@@ -732,7 +734,7 @@ def read(
     )
 
     def write(
-        self, values: pa.Table, platform_config: Optional[options.PlatformConfig] = None
+        self, values: pa.Table, platform_config: options.PlatformConfig | None = None
     ) -> Self:
         """Writes an `Arrow table `_ to the persistent object. As duplicate
         index values are not allowed, index values already
diff --git a/apis/python/src/tiledbsoma/_dense_nd_array.py b/apis/python/src/tiledbsoma/_dense_nd_array.py
index ef65f7c9d9..5920f9d2f4 100644
--- a/apis/python/src/tiledbsoma/_dense_nd_array.py
+++ b/apis/python/src/tiledbsoma/_dense_nd_array.py
@@ -7,7 +7,9 @@
 Implementation of SOMA DenseNDArray.
 """
 
-from typing import List, Optional, Sequence, Tuple, Union
+from __future__ import annotations
+
+from typing import List, Sequence, Tuple, Union
 
 import numpy as np
 import pyarrow as pa
@@ -93,9 +95,9 @@ def create(
         *,
         type: pa.DataType,
         shape: Sequence[Union[int, None]],
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> Self:
         context = _validate_soma_tiledb_context(context)
 
@@ -173,8 +175,8 @@ def read(
         coords: options.DenseNDCoords = (),
         *,
         result_order: options.ResultOrderStr = somacore.ResultOrder.ROW_MAJOR,
-        partitions: Optional[options.ReadPartitions] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        partitions: options.ReadPartitions | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> pa.Tensor:
         """Reads a user-defined dense slice of the array and return as an
         Arrow ``Tensor``.
@@ -262,7 +264,7 @@ def write(
         coords: options.DenseNDCoords,
         values: pa.Tensor,
         *,
-        platform_config: Optional[options.PlatformConfig] = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> Self:
         """Writes a subarray, defined by ``coords`` and ``values``. Will overwrite
         existing values in the array.
@@ -326,7 +328,7 @@ def write(
     def _dim_capacity_and_extent(
         cls,
         dim_name: str,
-        dim_shape: Optional[int],
+        dim_shape: int | None,
         ndim: int,
         create_options: TileDBCreateOptions,
     ) -> Tuple[int, int]:
diff --git a/apis/python/src/tiledbsoma/_eager_iter.py b/apis/python/src/tiledbsoma/_eager_iter.py
index c42e52c1ab..35f3655785 100644
--- a/apis/python/src/tiledbsoma/_eager_iter.py
+++ b/apis/python/src/tiledbsoma/_eager_iter.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 from concurrent import futures
-from typing import Iterator, Optional, TypeVar
+from typing import Iterator, TypeVar
 
 _T = TypeVar("_T")
@@ -8,7 +10,7 @@ class EagerIterator(Iterator[_T]):
     def __init__(
         self,
         iterator: Iterator[_T],
-        pool: Optional[futures.Executor] = None,
+        pool: futures.Executor | None = None,
     ):
         super().__init__()
         self.iterator = iterator
diff --git a/apis/python/src/tiledbsoma/_experiment.py b/apis/python/src/tiledbsoma/_experiment.py
index dcd297d29e..16aa2533ff 100644
--- a/apis/python/src/tiledbsoma/_experiment.py
+++ b/apis/python/src/tiledbsoma/_experiment.py
@@ -5,8 +5,10 @@
 
 """Implementation of a SOMA Experiment.
 """
+
+from __future__ import annotations
+
 import functools
-from typing import Optional
 
 from somacore import experiment, query
@@ -81,8 +83,8 @@ def axis_query(  # type: ignore
         self,
         measurement_name: str,
         *,
-        obs_query: Optional[query.AxisQuery] = None,
-        var_query: Optional[query.AxisQuery] = None,
+        obs_query: query.AxisQuery | None = None,
+        var_query: query.AxisQuery | None = None,
     ) -> ExperimentAxisQuery:
         """Creates an axis query over this experiment.
 
         Lifecycle: Maturing.
diff --git a/apis/python/src/tiledbsoma/_factory.py b/apis/python/src/tiledbsoma/_factory.py
index bf5ec7d2f5..508f6d4e26 100644
--- a/apis/python/src/tiledbsoma/_factory.py
+++ b/apis/python/src/tiledbsoma/_factory.py
@@ -7,10 +7,11 @@
 Collection.
 """
 
+from __future__ import annotations
+
 from typing import (
     Callable,
     Dict,
-    Optional,
     Type,
     TypeVar,
     Union,
@@ -55,9 +56,9 @@ def open(
     uri: str,
     mode: options.OpenMode = ...,
     *,
-    soma_type: Optional[str] = None,
-    context: Optional[SOMATileDBContext] = None,
-    tiledb_timestamp: Optional[OpenTimestamp] = None,
+    soma_type: str | None = None,
+    context: SOMATileDBContext | None = None,
+    tiledb_timestamp: OpenTimestamp | None = None,
 ) -> AnySOMAObject:
     ...
 
@@ -67,8 +68,8 @@ def open(
     uri: str,
     mode: options.OpenMode,
     *,
     soma_type: Type[_Obj],
-    context: Optional[SOMATileDBContext] = None,
-    tiledb_timestamp: Optional[OpenTimestamp] = None,
+    context: SOMATileDBContext | None = None,
+    tiledb_timestamp: OpenTimestamp | None = None,
 ) -> _Obj:
     ...
 
@@ -78,8 +79,8 @@ def open(
     uri: str,
     mode: options.OpenMode = "r",
     *,
     soma_type: Union[Type[SOMAObject], str, None] = None,  # type: ignore[type-arg]
-    context: Optional[SOMATileDBContext] = None,
-    tiledb_timestamp: Optional[OpenTimestamp] = None,
+    context: SOMATileDBContext | None = None,
+    tiledb_timestamp: OpenTimestamp | None = None,
 ) -> AnySOMAObject:
     """Opens a TileDB SOMA object.
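[Reviewer note] A hedged usage sketch of what the reworked `open` overloads buy: with a class passed as `soma_type`, a checker narrows the return type; with a string or nothing, it falls back to `AnySOMAObject`. The URI below is hypothetical:

    import tiledbsoma

    # `exp` is typed as Experiment via the Type[_Obj] overload above:
    with tiledbsoma.open("file:///tmp/demo_exp", soma_type=tiledbsoma.Experiment) as exp:
        print(exp.soma_type)

    # With no soma_type, the first overload applies and `obj` is AnySOMAObject:
    with tiledbsoma.open("file:///tmp/demo_exp") as obj:
        print(obj.soma_type)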
@@ -145,12 +146,12 @@ def open(
 
 def _open_internal(
     opener: Callable[
-        [str, options.OpenMode, SOMATileDBContext, Optional[OpenTimestamp]], _Wrapper
+        [str, options.OpenMode, SOMATileDBContext, OpenTimestamp | None], _Wrapper
     ],
     uri: str,
     mode: options.OpenMode,
     context: SOMATileDBContext,
-    timestamp: Optional[OpenTimestamp],
+    timestamp: OpenTimestamp | None,
 ) -> SOMAObject[_Wrapper]:
     """Lower-level open function for internal use only."""
     handle = opener(uri, mode, context, timestamp)
diff --git a/apis/python/src/tiledbsoma/_geometry_dataframe.py b/apis/python/src/tiledbsoma/_geometry_dataframe.py
index 4d984ef966..fff021523d 100644
--- a/apis/python/src/tiledbsoma/_geometry_dataframe.py
+++ b/apis/python/src/tiledbsoma/_geometry_dataframe.py
@@ -6,8 +6,10 @@
 Implementation of a SOMA Geometry DataFrame
 """
 
+from __future__ import annotations
+
 import warnings
-from typing import Any, Optional, Sequence, Tuple, Union
+from typing import Any, Sequence, Tuple, Union
 
 import pyarrow as pa
 import somacore
@@ -45,10 +47,10 @@ def create(
         *,
         schema: pa.Schema,
         coordinate_space: Union[Sequence[str], CoordinateSpace] = ("x", "y"),
-        domain: Optional[Domain] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        domain: Domain | None = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> Self:
         """Creates a new ``GeometryDataFrame`` at the given URI.
 
@@ -91,13 +93,13 @@ def create(
     def read(
         self,
         coords: options.SparseDFCoords = (),
-        column_names: Optional[Sequence[str]] = None,
+        column_names: Sequence[str] | None = None,
         *,
         batch_size: options.BatchSize = _UNBATCHED,
-        partitions: Optional[options.ReadPartitions] = None,
+        partitions: options.ReadPartitions | None = None,
         result_order: options.ResultOrderStr = options.ResultOrder.AUTO,
-        value_filter: Optional[str] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        value_filter: str | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> TableReadIter:
         """Reads a user-defined slice of data into Arrow tables.
 
@@ -124,16 +126,16 @@ def read(
     def read_spatial_region(
         self,
-        region: Optional[options.SpatialRegion] = None,
-        column_names: Optional[Sequence[str]] = None,
+        region: options.SpatialRegion | None = None,
+        column_names: Sequence[str] | None = None,
         *,
-        region_transform: Optional[CoordinateTransform] = None,
-        region_coord_space: Optional[CoordinateSpace] = None,
+        region_transform: CoordinateTransform | None = None,
+        region_coord_space: CoordinateSpace | None = None,
         batch_size: options.BatchSize = _UNBATCHED,
-        partitions: Optional[options.ReadPartitions] = None,
+        partitions: options.ReadPartitions | None = None,
         result_order: options.ResultOrderStr = options.ResultOrder.AUTO,
-        value_filter: Optional[str] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        value_filter: str | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> somacore.SpatialRead[somacore.ReadIter[pa.Table]]:
         """Reads data intersecting a user-defined region of space into a
         :class:`SpatialRead` with data in Arrow tables.
@@ -174,7 +176,7 @@ def write(
         self,
         values: Union[pa.RecordBatch, pa.Table],
         *,
-        platform_config: Optional[options.PlatformConfig] = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> Self:
         """Writes the data from an Arrow table to the persistent object.
 
diff --git a/apis/python/src/tiledbsoma/_indexer.py b/apis/python/src/tiledbsoma/_indexer.py
index 0b031720bb..7d0649d2a4 100644
--- a/apis/python/src/tiledbsoma/_indexer.py
+++ b/apis/python/src/tiledbsoma/_indexer.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import List, Optional, Union
+from typing import List, Union
 
 import numpy as np
 import numpy.typing as npt
@@ -25,7 +25,7 @@
 
 def tiledbsoma_build_index(
-    data: IndexerDataType, *, context: Optional[SOMATileDBContext] = None
+    data: IndexerDataType, *, context: SOMATileDBContext | None = None
 ) -> IndexLike:
     """Initialize re-indexer for provided indices (deprecated).
 
@@ -52,7 +52,7 @@ class IntIndexer:
     """
 
     def __init__(
-        self, data: IndexerDataType, *, context: Optional[SOMATileDBContext] = None
+        self, data: IndexerDataType, *, context: SOMATileDBContext | None = None
     ):
         """Initialize re-indexer for provided indices.
 
diff --git a/apis/python/src/tiledbsoma/_multiscale_image.py b/apis/python/src/tiledbsoma/_multiscale_image.py
index d92b4d2d03..4d4a4d466c 100644
--- a/apis/python/src/tiledbsoma/_multiscale_image.py
+++ b/apis/python/src/tiledbsoma/_multiscale_image.py
@@ -7,9 +7,11 @@
 Implementation of a SOMA MultiscaleImage.
 """
 
+from __future__ import annotations
+
 import json
 import warnings
-from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
+from typing import Any, Dict, List, Sequence, Tuple, Union
 
 import attrs
 import pyarrow as pa
@@ -119,16 +121,16 @@ def create(
         type: pa.DataType,
         level_shape: Sequence[int],
         level_key: str = "level0",
-        level_uri: Optional[str] = None,
+        level_uri: str | None = None,
         coordinate_space: Union[Sequence[str], CoordinateSpace] = (
             "x",
             "y",
         ),
-        data_axis_order: Optional[Sequence[str]] = None,
+        data_axis_order: Sequence[str] | None = None,
         has_channel_axis: bool = True,
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> Self:
         """Creates a new ``MultiscaleImage`` at the given URI.
 
@@ -313,7 +315,7 @@ def add_new_level(
         self,
         key: str,
         *,
-        uri: Optional[str] = None,
+        uri: str | None = None,
         shape: Sequence[int],
         **kwargs: Any,
     ) -> DenseNDArray:
@@ -389,7 +391,7 @@ def set(
         key: str,
         value: DenseNDArray,
         *,
-        use_relative_uri: Optional[bool] = None,
+        use_relative_uri: bool | None = None,
     ) -> Self:
         """Sets a new level in the multi-scale image to be an existing SOMA
         :class:`DenseNDArray`.
@@ -422,14 +424,14 @@ def set(
     def read_spatial_region(
         self,
         level: Union[int, str],
-        region: Optional[options.SpatialRegion] = None,
+        region: options.SpatialRegion | None = None,
         *,
         channel_coords: options.DenseCoord = None,
-        region_transform: Optional[CoordinateTransform] = None,
-        region_coord_space: Optional[CoordinateSpace] = None,
+        region_transform: CoordinateTransform | None = None,
+        region_coord_space: CoordinateSpace | None = None,
         result_order: options.ResultOrderStr = options.ResultOrder.ROW_MAJOR,
-        data_axis_order: Optional[Sequence[str]] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        data_axis_order: Sequence[str] | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> somacore.SpatialRead[pa.Tensor]:
         """Reads a user-defined spatial region from a specific level of the
         ``MultiscaleImage``.
diff --git a/apis/python/src/tiledbsoma/_point_cloud_dataframe.py b/apis/python/src/tiledbsoma/_point_cloud_dataframe.py
index df5bd44baa..fa35123435 100644
--- a/apis/python/src/tiledbsoma/_point_cloud_dataframe.py
+++ b/apis/python/src/tiledbsoma/_point_cloud_dataframe.py
@@ -6,8 +6,10 @@
 Implementation of a SOMA Point Cloud DataFrame
 """
 
+from __future__ import annotations
+
 import warnings
-from typing import Any, Optional, Sequence, Tuple, Union, cast
+from typing import Any, Sequence, Tuple, Union, cast
 
 import pyarrow as pa
 import somacore
@@ -69,10 +71,10 @@ def create(
         *,
         schema: pa.Schema,
         coordinate_space: Union[Sequence[str], CoordinateSpace] = ("x", "y"),
-        domain: Optional[Domain] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        domain: Domain | None = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> Self:
         """Creates a new ``PointCloudDataFrame`` at the given URI.
 
@@ -118,7 +120,7 @@ def create(
         """
         warnings.warn(SPATIAL_DISCLAIMER)
 
-        axis_dtype: Optional[pa.DataType] = None
+        axis_dtype: pa.DataType | None = None
         if not isinstance(coordinate_space, CoordinateSpace):
             coordinate_space = CoordinateSpace.from_axis_names(coordinate_space)
             index_column_names = coordinate_space.axis_names
@@ -298,13 +300,13 @@ def count(self) -> int:
     def read(
         self,
         coords: options.SparseDFCoords = (),
-        column_names: Optional[Sequence[str]] = None,
+        column_names: Sequence[str] | None = None,
         *,
         batch_size: options.BatchSize = _UNBATCHED,
-        partitions: Optional[options.ReadPartitions] = None,
+        partitions: options.ReadPartitions | None = None,
         result_order: options.ResultOrderStr = options.ResultOrder.AUTO,
-        value_filter: Optional[str] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        value_filter: str | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> TableReadIter:
         """Reads a user-defined slice of data into Arrow tables.
 
@@ -343,16 +345,16 @@ def read(
     def read_spatial_region(
         self,
-        region: Optional[options.SpatialRegion] = None,
-        column_names: Optional[Sequence[str]] = None,
+        region: options.SpatialRegion | None = None,
+        column_names: Sequence[str] | None = None,
         *,
-        region_transform: Optional[CoordinateTransform] = None,
-        region_coord_space: Optional[CoordinateSpace] = None,
+        region_transform: CoordinateTransform | None = None,
+        region_coord_space: CoordinateSpace | None = None,
         batch_size: options.BatchSize = _UNBATCHED,
-        partitions: Optional[options.ReadPartitions] = None,
+        partitions: options.ReadPartitions | None = None,
         result_order: options.ResultOrderStr = options.ResultOrder.AUTO,
-        value_filter: Optional[str] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        value_filter: str | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> somacore.SpatialRead[somacore.ReadIter[pa.Table]]:
         """Reads data intersecting a user-defined region of space into a
         :class:`SpatialRead` with data in Arrow tables.
 
@@ -444,7 +446,7 @@ def write(
         self,
         values: Union[pa.RecordBatch, pa.Table],
         *,
-        platform_config: Optional[options.PlatformConfig] = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> Self:
         """Writes the data from an Arrow table to the persistent object.
 
diff --git a/apis/python/src/tiledbsoma/_query.py b/apis/python/src/tiledbsoma/_query.py
index 074ce4bb07..0352855587 100644
--- a/apis/python/src/tiledbsoma/_query.py
+++ b/apis/python/src/tiledbsoma/_query.py
@@ -5,6 +5,9 @@
 
 """Implementation of a SOMA Experiment.
 """
+
+from __future__ import annotations
+
 import enum
 import warnings
 from concurrent.futures import ThreadPoolExecutor
@@ -15,7 +18,6 @@
     Dict,
     Literal,
     Mapping,
-    Optional,
     Protocol,
     Sequence,
     TypeVar,
@@ -120,8 +122,8 @@ class AxisIndexer(query.AxisIndexer):
 
     query: "ExperimentAxisQuery"
     _index_factory: IndexFactory
-    _cached_obs: Optional[IndexLike] = None
-    _cached_var: Optional[IndexLike] = None
+    _cached_obs: IndexLike | None = None
+    _cached_var: IndexLike | None = None
 
     @property
     def _obs_index(self) -> IndexLike:
@@ -243,11 +245,11 @@ def __init__(
     def obs(
         self,
         *,
-        column_names: Optional[Sequence[str]] = None,
+        column_names: Sequence[str] | None = None,
         batch_size: BatchSize = BatchSize(),
-        partitions: Optional[ReadPartitions] = None,
+        partitions: ReadPartitions | None = None,
         result_order: ResultOrderStr = _RO_AUTO,
-        platform_config: Optional[PlatformConfig] = None,
+        platform_config: PlatformConfig | None = None,
     ) -> ReadIter[pa.Table]:
         """Returns ``obs`` as an `Arrow table `_
 
@@ -269,11 +271,11 @@ def obs(
     def var(
         self,
         *,
-        column_names: Optional[Sequence[str]] = None,
+        column_names: Sequence[str] | None = None,
         batch_size: BatchSize = BatchSize(),
-        partitions: Optional[ReadPartitions] = None,
+        partitions: ReadPartitions | None = None,
         result_order: ResultOrderStr = _RO_AUTO,
-        platform_config: Optional[PlatformConfig] = None,
+        platform_config: PlatformConfig | None = None,
     ) -> ReadIter[pa.Table]:
         """Returns ``var`` as an `Arrow table `_
 
@@ -335,9 +337,9 @@ def X(
         layer_name: str,
         *,
         batch_size: BatchSize = BatchSize(),
-        partitions: Optional[ReadPartitions] = None,
+        partitions: ReadPartitions | None = None,
         result_order: ResultOrderStr = _RO_AUTO,
-        platform_config: Optional[PlatformConfig] = None,
+        platform_config: PlatformConfig | None = None,
     ) -> SparseRead:
         """Returns an ``X`` layer as a sparse read.
 
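[Reviewer note] Side note on the `_cached_obs`/`_cached_var` fields above: `attrs` keeps PEP 563 string annotations as-is when building a class, so `IndexLike | None` defaults are safe on 3.8/3.9 as well. A stripped-down sketch of the same caching pattern -- `Cache` is hypothetical, not the real `AxisIndexer`:

    from __future__ import annotations

    import attrs


    @attrs.define
    class Cache:
        _cached: int | None = None  # annotation stays a string; never evaluated

        def get(self, compute) -> int:
            # Compute once, then serve the cached value on later calls.
            if self._cached is None:
                self._cached = compute()
            return self._cached


    print(Cache().get(lambda: 41))  # 41, computed exactly once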
@@ -457,7 +459,7 @@ def to_anndata(
         self,
         X_name: str,
         *,
-        column_names: Optional[AxisColumnNames] = None,
+        column_names: AxisColumnNames | None = None,
         X_layers: Sequence[str] = (),
         obsm_layers: Sequence[str] = (),
         obsp_layers: Sequence[str] = (),
@@ -490,7 +492,7 @@ def to_spatialdata(  # type: ignore[no-untyped-def]
         self,
         X_name: str,
         *,
-        column_names: Optional[AxisColumnNames] = None,
+        column_names: AxisColumnNames | None = None,
         X_layers: Sequence[str] = (),
         obsm_layers: Sequence[str] = (),
         obsp_layers: Sequence[str] = (),
@@ -851,8 +853,8 @@ class JoinIDCache:
 
     owner: ExperimentAxisQuery
 
-    _cached_obs: Optional[pa.IntegerArray] = None
-    _cached_var: Optional[pa.IntegerArray] = None
+    _cached_obs: pa.IntegerArray | None = None
+    _cached_var: pa.IntegerArray | None = None
 
     def _is_cached(self, axis: Axis) -> bool:
         field = "_cached_" + axis.value
diff --git a/apis/python/src/tiledbsoma/_query_condition.py b/apis/python/src/tiledbsoma/_query_condition.py
index ed1c99e890..8bcd2d7429 100644
--- a/apis/python/src/tiledbsoma/_query_condition.py
+++ b/apis/python/src/tiledbsoma/_query_condition.py
@@ -6,8 +6,11 @@
 """A high level wrapper around the Pybind11 query_condition.cc implementation for
 filtering query results on attribute values.
 """
+
+from __future__ import annotations
+
 import ast
-from typing import Any, Callable, List, Optional, Tuple, Union
+from typing import Any, Callable, List, Tuple, Union
 
 import attrs
 import numpy as np
@@ -128,7 +131,7 @@ def __attrs_post_init__(self):
     def init_query_condition(
         self,
         schema: pa.Schema,
-        query_attrs: Optional[List[str]],
+        query_attrs: List[str] | None,
     ):
         try:
             qctree = QueryConditionTree(schema, query_attrs)
diff --git a/apis/python/src/tiledbsoma/_read_iters.py b/apis/python/src/tiledbsoma/_read_iters.py
index 92a8198ca7..11adbc7c14 100644
--- a/apis/python/src/tiledbsoma/_read_iters.py
+++ b/apis/python/src/tiledbsoma/_read_iters.py
@@ -5,6 +5,7 @@
 
 """Read iterators.
 """
+
 from __future__ import annotations
 
 import abc
@@ -15,7 +16,6 @@
     Any,
     Iterator,
     List,
-    Optional,
     Sequence,
     Tuple,
     TypeVar,
@@ -73,10 +73,10 @@ def __init__(
         coords: Union[
             options.SparseDFCoords, options.SparseNDCoords, options.DenseNDCoords
         ],
-        column_names: Optional[Sequence[str]],
+        column_names: Sequence[str] | None,
         result_order: clib.ResultOrder,
-        value_filter: Optional[str],
-        platform_config: Optional[options.PlatformConfig],
+        value_filter: str | None,
+        platform_config: options.PlatformConfig | None,
     ):
         """Initializes a new TableReadIter for SOMAArrays.
 
@@ -90,7 +90,7 @@
                 for each index dimension, which rows to read.
                 ``()`` means no constraint -- all IDs.
 
-            column_names (Optional[Sequence[str]]):
+            column_names (Sequence[str] | None):
                 The named columns to read and return.
                 ``None`` means no constraint -- all column names.
 
@@ -98,10 +98,10 @@
                 Order of read results. This can be one of automatic,
                 rowmajor, or colmajor.
 
-            value_filter (Optional[str]):
+            value_filter (str | None):
                 An optional [value filter] to apply to the results.
 
-            platform_config (Optional[options.PlatformConfig]):
+            platform_config (options.PlatformConfig | None):
                 Pass in parameters for tuning reads.
 
""" @@ -131,12 +131,12 @@ def __init__( coords: options.SparseNDCoords, axis: Union[int, Sequence[int]], result_order: clib.ResultOrder, - platform_config: Optional[options.PlatformConfig], + platform_config: options.PlatformConfig | None, *, - size: Optional[Union[int, Sequence[int]]] = None, - reindex_disable_on_axis: Optional[Union[int, Sequence[int]]] = None, + size: int | Sequence[int] | None = None, + reindex_disable_on_axis: int | Sequence[int] | None = None, eager: bool = True, - context: Optional[SOMATileDBContext] = None, + context: SOMATileDBContext | None = None, ): super().__init__() @@ -192,8 +192,8 @@ def _validate_args( cls, shape: Union[NTuple, Sequence[int]], axis: Union[int, Sequence[int]], - size: Optional[Union[int, Sequence[int]]] = None, - reindex_disable_on_axis: Optional[Union[int, Sequence[int]]] = None, + size: int | Sequence[int] | None = None, + reindex_disable_on_axis: int | Sequence[int] | None = None, ) -> Tuple[List[int], List[int], List[int]]: """ Class method to validate and normalize common user-provided arguments axis, size and reindex_disable_on_axis. @@ -262,7 +262,7 @@ def concat(self) -> _RT: raise NotImplementedError("Blockwise iterators do not support concat operation") def _maybe_eager_iterator( - self, x: Iterator[_EagerRT], _pool: Optional[ThreadPoolExecutor] = None + self, x: Iterator[_EagerRT], _pool: ThreadPoolExecutor | None = None ) -> Iterator[_EagerRT]: """Private""" return EagerIterator(x, pool=_pool) if self.eager else x @@ -292,7 +292,7 @@ def _table_reader(self) -> Iterator[BlockwiseTableReadIterResult]: def _reindexed_table_reader( self, - _pool: Optional[ThreadPoolExecutor] = None, + _pool: ThreadPoolExecutor | None = None, ) -> Iterator[BlockwiseTableReadIterResult]: """Private. Blockwise table reader w/ reindexing. Helper function for sub-class use""" for tbl, coords in self._maybe_eager_iterator(self._table_reader(), _pool): @@ -335,13 +335,13 @@ def __init__( coords: options.SparseNDCoords, axis: Union[int, Sequence[int]], result_order: clib.ResultOrder, - platform_config: Optional[options.PlatformConfig], + platform_config: options.PlatformConfig | None, *, - size: Optional[Union[int, Sequence[int]]] = None, - reindex_disable_on_axis: Optional[Union[int, Sequence[int]]] = None, + size: int | Sequence[int] | None = None, + reindex_disable_on_axis: int | Sequence[int] | None = None, eager: bool = True, compress: bool = True, - context: Optional[SOMATileDBContext] = None, + context: SOMATileDBContext | None = None, ): self.compress = compress self.context = context @@ -390,7 +390,7 @@ def _create_reader(self) -> Iterator[BlockwiseScipyReadIterResult]: ) def _sorted_tbl_reader( - self, _pool: Optional[ThreadPoolExecutor] = None + self, _pool: ThreadPoolExecutor | None = None ) -> Iterator[Tuple[IJDType, IndicesType]]: """Private. Read reindexed tables and sort them. Yield as ((i,j),d)""" for coo_tbl, indices in self._maybe_eager_iterator( @@ -424,7 +424,7 @@ def _mk_shape( return cast(Tuple[int, int], tuple(_sp_shape)) def _coo_reader( - self, _pool: Optional[ThreadPoolExecutor] = None + self, _pool: ThreadPoolExecutor | None = None ) -> Iterator[Tuple[sparse.coo_matrix, IndicesType]]: """Private. Uncompressed variants""" assert not self.compress @@ -446,7 +446,7 @@ def _coo_reader( yield sp, indices def _cs_reader( - self, _pool: Optional[ThreadPoolExecutor] = None + self, _pool: ThreadPoolExecutor | None = None ) -> Iterator[Tuple[Union[sparse.csr_matrix, sparse.csc_matrix], IndicesType],]: """Private. 
         Compressed sparse variants"""
         assert self.compress
@@ -479,7 +479,7 @@ def __init__(
         coords: options.SparseDFCoords,
         shape: NTuple,
         result_order: clib.ResultOrder,
-        platform_config: Optional[options.PlatformConfig],
+        platform_config: options.PlatformConfig | None,
     ):
         self.array = array
         self.coords = coords
@@ -549,10 +549,10 @@ def __init__(
         coords: Union[
             options.SparseDFCoords, options.SparseNDCoords, options.DenseNDCoords
         ],
-        column_names: Optional[Sequence[str]],
+        column_names: Sequence[str] | None,
         result_order: clib.ResultOrder,
-        value_filter: Optional[str],
-        platform_config: Optional[options.PlatformConfig],
+        value_filter: str | None,
+        platform_config: options.PlatformConfig | None,
     ):
         clib_handle = array._handle._handle
 
@@ -628,6 +628,6 @@ def _coords_strider(
 _ElemT = TypeVar("_ElemT")
 
 
-def _pad_with_none(s: Sequence[_ElemT], to_length: int) -> Tuple[Optional[_ElemT], ...]:
+def _pad_with_none(s: Sequence[_ElemT], to_length: int) -> Tuple[_ElemT | None, ...]:
     """Given a sequence, pad length to a user-specified length, with None values"""
     return tuple(s[i] if i < len(s) else None for i in range(to_length))
diff --git a/apis/python/src/tiledbsoma/_scene.py b/apis/python/src/tiledbsoma/_scene.py
index 71aabdda27..d107a65fa8 100644
--- a/apis/python/src/tiledbsoma/_scene.py
+++ b/apis/python/src/tiledbsoma/_scene.py
@@ -6,8 +6,10 @@
 Implementation of a SOMA Scene
 """
 
+from __future__ import annotations
+
 import warnings
-from typing import Any, List, Optional, Sequence, Tuple, Type, TypeVar, Union
+from typing import Any, List, Sequence, Tuple, Type, TypeVar, Union
 
 import somacore
 from somacore import (
@@ -71,10 +73,10 @@ def create(
         cls,
         uri: str,
         *,
-        coordinate_space: Optional[Union[Sequence[str], CoordinateSpace]] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        coordinate_space: Sequence[str] | CoordinateSpace | None = None,
+        platform_config: options.PlatformConfig | None = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> Self:
         """Creates a new scene at the given URI.
 
@@ -135,7 +137,7 @@ def __init__(
         super().__init__(handle, **kwargs)
         coord_space = self.metadata.get(SOMA_COORDINATE_SPACE_METADATA_KEY)
         if coord_space is None:
-            self._coord_space: Optional[CoordinateSpace] = None
+            self._coord_space: CoordinateSpace | None = None
         else:
             self._coord_space = coordinate_space_from_json(coord_space)
 
@@ -168,7 +170,7 @@ def _set_transform_to_element(
         key: str,
         transform: CoordinateTransform,
         subcollection: Union[str, Sequence[str]],
-        coordinate_space: Optional[CoordinateSpace],
+        coordinate_space: CoordinateSpace | None,
     ) -> _SE:
         # Check the transform is compatible with the coordinate spaces of the scene
         # and the new element coordinate space (if provided).
@@ -221,7 +223,7 @@ def _set_transform_to_element(
         return elem
 
     @property
-    def coordinate_space(self) -> Optional[CoordinateSpace]:
+    def coordinate_space(self) -> CoordinateSpace | None:
         """Coordinate system for this scene.
 
         Lifecycle:
@@ -246,8 +248,8 @@ def add_new_geometry_dataframe(
         key: str,
         subcollection: Union[str, Sequence[str]],
         *,
-        transform: Optional[CoordinateTransform],
-        uri: Optional[str] = None,
+        transform: CoordinateTransform | None,
+        uri: str | None = None,
         **kwargs: Any,
     ) -> GeometryDataFrame:
         """Adds a ``GeometryDataFrame`` to the scene and sets a coordinate transform
@@ -290,8 +292,8 @@ def add_new_multiscale_image(
         key: str,
         subcollection: Union[str, Sequence[str]],
         *,
-        transform: Optional[CoordinateTransform],
-        uri: Optional[str] = None,
+        transform: CoordinateTransform | None,
+        uri: str | None = None,
         coordinate_space: Union[Sequence[str], CoordinateSpace] = ("x", "y"),
         **kwargs: Any,
     ) -> MultiscaleImage:
@@ -381,8 +383,8 @@ def add_new_point_cloud_dataframe(
         key: str,
         subcollection: Union[str, Sequence[str]],
         *,
-        transform: Optional[CoordinateTransform],
-        uri: Optional[str] = None,
+        transform: CoordinateTransform | None,
+        uri: str | None = None,
         coordinate_space: Union[Sequence[str], CoordinateSpace] = ("x", "y"),
         **kwargs: Any,
     ) -> PointCloudDataFrame:
@@ -473,7 +475,7 @@ def set_transform_to_geometry_dataframe(
         subcollection: Union[str, Sequence[str]] = "obsl",
         *,
         transform: CoordinateTransform,
-        coordinate_space: Optional[CoordinateSpace] = None,
+        coordinate_space: CoordinateSpace | None = None,
     ) -> GeometryDataFrame:
         """Adds the coordinate transform for the scene coordinate space to
         a geometry dataframe stored in the scene.
@@ -508,7 +510,7 @@ def set_transform_to_multiscale_image(
         subcollection: Union[str, Sequence[str]] = "img",
         *,
         transform: CoordinateTransform,
-        coordinate_space: Optional[CoordinateSpace] = None,
+        coordinate_space: CoordinateSpace | None = None,
     ) -> MultiscaleImage:
         """Adds the coordinate transform for the scene coordinate space to
         a multiscale image stored in the scene.
@@ -545,7 +547,7 @@ def set_transform_to_point_cloud_dataframe(
         subcollection: Union[str, Sequence[str]] = "obsl",
         *,
         transform: CoordinateTransform,
-        coordinate_space: Optional[CoordinateSpace] = None,
+        coordinate_space: CoordinateSpace | None = None,
     ) -> PointCloudDataFrame:
         """Adds the coordinate transform for the scene coordinate space to
         a point cloud dataframe stored in the scene.
@@ -605,7 +607,7 @@ def get_transform_from_multiscale_image(
         key: str,
         subcollection: str = "img",
         *,
-        level: Optional[Union[str, int]] = None,
+        level: str | int | None = None,
     ) -> CoordinateTransform:
         """Returns the coordinate transformation from the requested multiscale
         image to the scene.
@@ -701,7 +703,7 @@ def get_transform_to_multiscale_image(
         key: str,
         subcollection: str = "img",
         *,
-        level: Optional[Union[str, int]] = None,
+        level: str | int | None = None,
     ) -> CoordinateTransform:
         """Returns the coordinate transformation from the scene to a requested
         multiscale image.
diff --git a/apis/python/src/tiledbsoma/_soma_group.py b/apis/python/src/tiledbsoma/_soma_group.py
index c56fbba370..0526ce7544 100644
--- a/apis/python/src/tiledbsoma/_soma_group.py
+++ b/apis/python/src/tiledbsoma/_soma_group.py
@@ -3,6 +3,8 @@
 #
 # Licensed under the MIT License.
+from __future__ import annotations
+
 import re
 from typing import (
     Any,
@@ -10,7 +12,6 @@
     Generic,
     Iterable,
     Iterator,
-    Optional,
     Set,
     Type,
     TypeVar,
@@ -44,7 +45,7 @@ class _CachedElement:
     """Item we have loaded in the cache of a collection."""
 
     entry: _tdb_handles.GroupEntry
-    soma: Optional[AnySOMAObject] = None
+    soma: AnySOMAObject | None = None
     """The reified object, if it has been opened."""
 
@@ -188,7 +189,7 @@ def _add_new_element(
         key: str,
         kind: Type[_TDBO],
         factory: Callable[[str], _TDBO],
-        user_uri: Optional[str],
+        user_uri: str | None,
     ) -> _TDBO:
         """Handles the common parts of adding new elements.
 
@@ -220,7 +221,7 @@ def _add_new_element(
             self._close_stack.enter_context(child)
         return child
 
-    def _new_child_uri(self, *, key: str, user_uri: Optional[str]) -> "_ChildURI":
+    def _new_child_uri(self, *, key: str, user_uri: str | None) -> "_ChildURI":
         maybe_relative_uri = user_uri or _sanitize_for_path(key)
         if not is_relative_uri(maybe_relative_uri):
             # It's an absolute URI.
@@ -249,7 +250,7 @@ def set(
         key: str,
         value: CollectionElementType,
         *,
-        use_relative_uri: Optional[bool] = None,
+        use_relative_uri: bool | None = None,
     ) -> Self:
         """Adds an element to the collection.
 
diff --git a/apis/python/src/tiledbsoma/_soma_object.py b/apis/python/src/tiledbsoma/_soma_object.py
index 3c2776036f..ed5ba2fbe4 100644
--- a/apis/python/src/tiledbsoma/_soma_object.py
+++ b/apis/python/src/tiledbsoma/_soma_object.py
@@ -3,9 +3,11 @@
 #
 # Licensed under the MIT License.
 
+from __future__ import annotations
+
 import datetime
 from contextlib import ExitStack
-from typing import Any, Generic, MutableMapping, Optional, Type, TypeVar, Union
+from typing import Any, Generic, MutableMapping, Type, TypeVar, Union
 
 import somacore
 from somacore import options
@@ -59,10 +61,10 @@ def open(
         uri: str,
         mode: options.OpenMode = "r",
         *,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
-        context: Optional[SOMATileDBContext] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
-        clib_type: Optional[str] = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
+        context: SOMATileDBContext | None = None,
+        platform_config: options.PlatformConfig | None = None,
+        clib_type: str | None = None,
     ) -> Self:
         """Opens this specific type of SOMA object.
 
@@ -149,7 +151,7 @@ def __init__(
         self._close_stack.enter_context(self._handle)
 
     def reopen(
-        self, mode: options.OpenMode, tiledb_timestamp: Optional[OpenTimestamp] = None
+        self, mode: options.OpenMode, tiledb_timestamp: OpenTimestamp | None = None
    ) -> Self:
         """
         Return a new copy of the SOMAObject with the given mode at the current
@@ -283,8 +285,8 @@ def tiledb_timestamp_ms(self) -> int:
     def exists(
         cls,
         uri: str,
-        context: Optional[SOMATileDBContext] = None,
-        tiledb_timestamp: Optional[OpenTimestamp] = None,
+        context: SOMATileDBContext | None = None,
+        tiledb_timestamp: OpenTimestamp | None = None,
     ) -> bool:
         """
         Finds whether an object of this type exists at the given URI.
diff --git a/apis/python/src/tiledbsoma/_sparse_nd_array.py b/apis/python/src/tiledbsoma/_sparse_nd_array.py
index 60891c9629..2e5638ba5f 100644
--- a/apis/python/src/tiledbsoma/_sparse_nd_array.py
+++ b/apis/python/src/tiledbsoma/_sparse_nd_array.py
@@ -6,12 +6,12 @@
 """
 Implementation of SOMA SparseNDArray.
""" + from __future__ import annotations import itertools from typing import ( Dict, - Optional, Sequence, Tuple, Union, @@ -116,9 +116,9 @@ def create( *, type: pa.DataType, shape: Sequence[Union[int, None]], - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[SOMATileDBContext] = None, - tiledb_timestamp: Optional[OpenTimestamp] = None, + platform_config: options.PlatformConfig | None = None, + context: SOMATileDBContext | None = None, + tiledb_timestamp: OpenTimestamp | None = None, ) -> Self: context = _validate_soma_tiledb_context(context) @@ -228,8 +228,8 @@ def read( *, result_order: options.ResultOrderStr = options.ResultOrder.AUTO, batch_size: options.BatchSize = _UNBATCHED, - partitions: Optional[options.ReadPartitions] = None, - platform_config: Optional[PlatformConfig] = None, + partitions: options.ReadPartitions | None = None, + platform_config: PlatformConfig | None = None, ) -> "SparseNDArrayRead": """Reads a user-defined slice of the :class:`SparseNDArray`. @@ -285,7 +285,7 @@ def write( pa.Table, ], *, - platform_config: Optional[PlatformConfig] = None, + platform_config: PlatformConfig | None = None, ) -> Self: """ Writes an Arrow object to the SparseNDArray. @@ -412,7 +412,7 @@ def write( def _dim_capacity_and_extent( cls, dim_name: str, - dim_shape: Optional[int], + dim_shape: int | None, ndim: int, # not needed for sparse create_options: TileDBCreateOptions, ) -> Tuple[int, int]: @@ -516,7 +516,7 @@ def __init__( array: SparseNDArray, coords: options.SparseNDCoords, result_order: clib.ResultOrder, - platform_config: Optional[options.PlatformConfig], + platform_config: options.PlatformConfig | None, ): """ Lifecycle: @@ -546,7 +546,7 @@ class SparseNDArrayRead(_SparseNDArrayReadBase): Maturing. """ - def coos(self, shape: Optional[NTuple] = None) -> SparseCOOTensorReadIter: + def coos(self, shape: NTuple | None = None) -> SparseCOOTensorReadIter: """ Returns an iterator of `Arrow SparseCOOTensor `_. 
@@ -589,8 +589,8 @@ def blockwise(
         self,
         axis: Union[int, Sequence[int]],
         *,
-        size: Optional[Union[int, Sequence[int]]] = None,
-        reindex_disable_on_axis: Optional[Union[int, Sequence[int]]] = None,
+        size: int | Sequence[int] | None = None,
+        reindex_disable_on_axis: int | Sequence[int] | None = None,
         eager: bool = True,
     ) -> SparseNDArrayBlockwiseRead:
         """
@@ -672,10 +672,10 @@ def __init__(
         coords: options.SparseNDCoords,
         axis: Union[int, Sequence[int]],
         result_order: clib.ResultOrder,
-        platform_config: Optional[options.PlatformConfig],
+        platform_config: options.PlatformConfig | None,
         *,
-        size: Optional[Union[int, Sequence[int]]],
-        reindex_disable_on_axis: Optional[Union[int, Sequence[int]]],
+        size: int | Sequence[int] | None,
+        reindex_disable_on_axis: int | Sequence[int] | None,
         eager: bool = True,
     ):
         super().__init__(array, coords, result_order, platform_config)
diff --git a/apis/python/src/tiledbsoma/_spatial_dataframe.py b/apis/python/src/tiledbsoma/_spatial_dataframe.py
index 3c6abba155..18490ea77e 100644
--- a/apis/python/src/tiledbsoma/_spatial_dataframe.py
+++ b/apis/python/src/tiledbsoma/_spatial_dataframe.py
@@ -6,7 +6,10 @@
 """
 Implementation of a base class shared between GeometryDataFrame and PointCloudDataFrame
 """
-from typing import Any, Optional, Sequence, Tuple, Union
+
+from __future__ import annotations
+
+from typing import Any, Sequence, Tuple, Union
 
 import pyarrow as pa
 import somacore
@@ -68,13 +71,13 @@ def domain(self) -> Tuple[Tuple[Any, Any], ...]:
     def read(
         self,
         coords: options.SparseDFCoords = (),
-        column_names: Optional[Sequence[str]] = None,
+        column_names: Sequence[str] | None = None,
         *,
         result_order: options.ResultOrderStr = options.ResultOrder.AUTO,
-        value_filter: Optional[str] = None,
+        value_filter: str | None = None,
         batch_size: options.BatchSize = _UNBATCHED,
-        partitions: Optional[options.ReadPartitions] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        partitions: options.ReadPartitions | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> TableReadIter:
         """Reads a user-defined slice of data into Arrow tables.
 
@@ -101,16 +104,16 @@ def read(
     def read_spatial_region(
         self,
-        region: Optional[options.SpatialRegion] = None,
-        column_names: Optional[Sequence[str]] = None,
+        region: options.SpatialRegion | None = None,
+        column_names: Sequence[str] | None = None,
         *,
-        region_transform: Optional[CoordinateTransform] = None,
-        region_coord_space: Optional[CoordinateSpace] = None,
+        region_transform: CoordinateTransform | None = None,
+        region_coord_space: CoordinateSpace | None = None,
         batch_size: options.BatchSize = _UNBATCHED,
-        partitions: Optional[options.ReadPartitions] = None,
+        partitions: options.ReadPartitions | None = None,
         result_order: options.ResultOrderStr = options.ResultOrder.AUTO,
-        value_filter: Optional[str] = None,
-        platform_config: Optional[options.PlatformConfig] = None,
+        value_filter: str | None = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> somacore.SpatialRead[somacore.ReadIter[pa.Table]]:
         """Reads data intersecting a user-defined region of space into a
         :class:`SpatialRead` with data in Arrow tables.
 
@@ -151,7 +154,7 @@ def write(
         self,
         values: Union[pa.RecordBatch, pa.Table],
         *,
-        platform_config: Optional[options.PlatformConfig] = None,
+        platform_config: options.PlatformConfig | None = None,
     ) -> Self:
         """Writes the data from an Arrow table to the persistent object.
 
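[Reviewer note] One more equivalence worth checking for signatures like `size` and `reindex_disable_on_axis` above: chained `|` unions flatten, so the new spelling is the same type as the old nested `Optional[Union[...]]`. A standalone check, not part of the patch:

    from typing import Optional, Sequence, Union

    # `int | Sequence[int] | None` denotes Union[int, Sequence[int], None],
    # which is exactly Optional[Union[int, Sequence[int]]]:
    assert Optional[Union[int, Sequence[int]]] == Union[int, Sequence[int], None]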
diff --git a/apis/python/src/tiledbsoma/_spatial_util.py b/apis/python/src/tiledbsoma/_spatial_util.py
index a16a364383..9d5ce7bb59 100644
--- a/apis/python/src/tiledbsoma/_spatial_util.py
+++ b/apis/python/src/tiledbsoma/_spatial_util.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 import json
-from typing import Any, Dict, Optional, Tuple, Type
+from typing import Any, Dict, Tuple, Type
 
 import numpy as np
 import pyarrow as pa
@@ -113,17 +115,17 @@ def transform_region(
 
 
 def process_image_region(
-    region: Optional[options.SpatialRegion],
+    region: options.SpatialRegion | None,
     transform: somacore.CoordinateTransform,
     channel_coords: options.DenseCoord,
     data_order: Tuple[int, ...],
 ) -> Tuple[
-    options.DenseNDCoords, Optional[options.SpatialRegion], somacore.CoordinateTransform
+    options.DenseNDCoords, options.SpatialRegion | None, somacore.CoordinateTransform
 ]:
     if region is None:
         # Select the full region.
-        data_region: Optional[options.SpatialRegion] = None
+        data_region: options.SpatialRegion | None = None
         x_coords: options.DenseCoord = None
         y_coords: options.DenseCoord = None
     else:
@@ -173,7 +175,7 @@ def process_image_region(
 
 
 def process_spatial_df_region(
-    region: Optional[options.SpatialRegion],
+    region: options.SpatialRegion | None,
     transform: somacore.CoordinateTransform,
     coords_by_name: Dict[str, options.SparseDFCoord],
     index_columns: Tuple[str, ...],
@@ -181,7 +183,7 @@
     schema: pa.Schema,
 ) -> Tuple[
     options.SparseDFCoords,
-    Optional[options.SpatialRegion],
+    options.SpatialRegion | None,
     somacore.CoordinateTransform,
 ]:
     # Check provided coords are valid.
@@ -194,7 +196,7 @@
     # to the coords_by_name map.
     if region is None:
         # Leave spatial coords as None - this will select the entire region.
-        data_region: Optional[options.SpatialRegion] = None
+        data_region: options.SpatialRegion | None = None
     else:
         # Restricted to guarantee data region is a box.
         if isinstance(region, shapely.GeometryType):
diff --git a/apis/python/src/tiledbsoma/_tdb_handles.py b/apis/python/src/tiledbsoma/_tdb_handles.py
index f7b00bf654..7efc998068 100644
--- a/apis/python/src/tiledbsoma/_tdb_handles.py
+++ b/apis/python/src/tiledbsoma/_tdb_handles.py
@@ -8,6 +8,8 @@
 ``open``, ``ArrayWrapper.open``, ``GroupWrapper.open`` are the important parts.
""" +from __future__ import annotations + import abc import enum from typing import ( @@ -18,7 +20,6 @@ List, Mapping, MutableMapping, - Optional, Sequence, Tuple, Type, @@ -64,8 +65,8 @@ def open( uri: str, mode: options.OpenMode, context: SOMATileDBContext, - timestamp: Optional[OpenTimestamp], - clib_type: Optional[str] = None, + timestamp: OpenTimestamp | None, + clib_type: str | None = None, ) -> "Wrapper[RawHandle]": """Determine whether the URI is an array or group, and open it.""" open_mode = clib.OpenMode.read if mode == "r" else clib.OpenMode.write @@ -140,7 +141,7 @@ class Wrapper(Generic[_RawHdl_co], metaclass=abc.ABCMeta): timestamp_ms: int _handle: _RawHdl_co closed: bool = attrs.field(default=False, init=False) - clib_type: Optional[str] = None + clib_type: str | None = None @classmethod def open( @@ -148,7 +149,7 @@ def open( uri: str, mode: options.OpenMode, context: SOMATileDBContext, - timestamp: Optional[OpenTimestamp], + timestamp: OpenTimestamp | None, ) -> Self: if mode not in ("r", "w"): raise ValueError(f"Invalid open mode {mode!r}") @@ -202,7 +203,7 @@ def _opener( raise NotImplementedError() def reopen( - self, mode: options.OpenMode, timestamp: Optional[OpenTimestamp] + self, mode: options.OpenMode, timestamp: OpenTimestamp | None ) -> clib.SOMAObject: if mode not in ("r", "w"): raise ValueError( @@ -443,12 +444,12 @@ def maxshape(self) -> Tuple[int, ...]: return cast(Tuple[int, ...], tuple(self._handle.maxshape)) @property - def maybe_soma_joinid_shape(self) -> Optional[int]: + def maybe_soma_joinid_shape(self) -> int | None: """Only implemented for DataFrame.""" raise NotImplementedError @property - def maybe_soma_joinid_maxshape(self) -> Optional[int]: + def maybe_soma_joinid_maxshape(self) -> int | None: """Only implemented for DataFrame.""" raise NotImplementedError @@ -542,14 +543,14 @@ def write(self, values: pa.RecordBatch) -> None: self._handle.write(values) @property - def maybe_soma_joinid_shape(self) -> Optional[int]: + def maybe_soma_joinid_shape(self) -> int | None: """Wrapper-class internals""" - return cast(Optional[int], self._handle.maybe_soma_joinid_shape) + return cast(Union[int, None], self._handle.maybe_soma_joinid_shape) @property - def maybe_soma_joinid_maxshape(self) -> Optional[int]: + def maybe_soma_joinid_maxshape(self) -> int | None: """Wrapper-class internals""" - return cast(Optional[int], self._handle.maybe_soma_joinid_maxshape) + return cast(Union[int, None], self._handle.maybe_soma_joinid_maxshape) @property def tiledbsoma_has_upgraded_domain(self) -> bool: diff --git a/apis/python/src/tiledbsoma/_util.py b/apis/python/src/tiledbsoma/_util.py index e007ab9c16..9bce8a7e8b 100644 --- a/apis/python/src/tiledbsoma/_util.py +++ b/apis/python/src/tiledbsoma/_util.py @@ -3,6 +3,8 @@ # # Licensed under the MIT License. +from __future__ import annotations + import datetime import json import pathlib @@ -15,7 +17,6 @@ Dict, List, Mapping, - Optional, Sequence, Tuple, Type, @@ -149,7 +150,7 @@ class NonNumericDimensionError(TypeError): def slice_to_numeric_range( slc: Slice[Any], domain: Tuple[_T, _T] -) -> Optional[Tuple[_T, _T]]: +) -> Tuple[_T, _T] | None: """Constrains the given slice to the ``domain`` for numeric dimensions. We assume the slice has already been validated by validate_slice. 
@@ -244,7 +245,7 @@ def check_type(
     )
 
 
-def check_unpartitioned(partitions: Optional[options.ReadPartitions]) -> None:
+def check_unpartitioned(partitions: options.ReadPartitions | None) -> None:
     """Ensures that we're not being asked for a partitioned read.
 
     Because we currently don't support partitioned reads, we should reject all
@@ -325,7 +326,7 @@ def cast_values_to_target_schema(values: pa.Table, schema: pa.Schema) -> pa.Tabl
 
 
 def build_clib_platform_config(
-    platform_config: Optional[options.PlatformConfig],
+    platform_config: options.PlatformConfig | None,
 ) -> clib.PlatformConfig:
     """
     Copy over Python PlatformConfig values to the C++ clib.PlatformConfig
@@ -352,7 +353,7 @@
     return plt_cfg
 
 
-def _build_column_config(col: Optional[Mapping[str, _ColumnConfig]]) -> str:
+def _build_column_config(col: Mapping[str, _ColumnConfig] | None) -> str:
     column_config: Dict[str, Dict[str, Union[_JSONFilterList, int]]] = dict()
 
     if col is None:
@@ -370,7 +371,7 @@
 
 
 def _build_filter_list(
-    filters: Optional[Tuple[_DictFilterSpec, ...]], return_json: bool = True
+    filters: Tuple[_DictFilterSpec, ...] | None, return_json: bool = True
 ) -> _JSONFilterList:
     _convert_filter = {
         "GzipFilter": "GZIP",
diff --git a/apis/python/src/tiledbsoma/logging.py b/apis/python/src/tiledbsoma/logging.py
index 4c00810ebd..183f05c785 100644
--- a/apis/python/src/tiledbsoma/logging.py
+++ b/apis/python/src/tiledbsoma/logging.py
@@ -3,8 +3,9 @@
 #
 # Licensed under the MIT License.
 
+from __future__ import annotations
+
 import logging
-from typing import Optional
 
 logger = logging.getLogger("tiledbsoma")
 
@@ -55,7 +56,7 @@ def log_io_same(message: str) -> None:
     log_io(message, message)
 
 
-def log_io(info_message: Optional[str], debug_message: str) -> None:
+def log_io(info_message: str | None, debug_message: str) -> None:
     """Data-ingestion timeframes range widely. Some folks won't want details
     for smaller uploads; some will want details for larger ones. For I/O and
     for I/O only, it's helpful to print a short message at INFO level,