From 09007c05983bf2091873525f51747b7b82cfd326 Mon Sep 17 00:00:00 2001 From: caniko Date: Tue, 18 Jul 2023 13:19:29 +0200 Subject: [PATCH] Feat: Add Pydantic V2 support Refactor: Naming changes for typing, and package reorganization Chore: Bump version, Major -> Minor --- .github/workflows/python-package.yml | 28 - .github/workflows/test.yaml | 63 +++ README.md | 69 ++- poetry.lock | 487 ++++++++---------- pydantic_numpy/__init__.py | 5 +- pydantic_numpy/dtype.py | 167 ------ pydantic_numpy/helper/__init__.py | 0 pydantic_numpy/helper/annotation.py | 155 ++++++ pydantic_numpy/helper/validation.py | 123 +++++ pydantic_numpy/model.py | 148 ------ pydantic_numpy/model/__init__.py | 2 + pydantic_numpy/model/multi_array.py | 48 ++ pydantic_numpy/model/np_model.py | 236 +++++++++ pydantic_numpy/ndarray.py | 95 ---- pydantic_numpy/typing/__init__.py | 8 + pydantic_numpy/typing/i_dimensional.py | 54 ++ pydantic_numpy/typing/ii_dimensional.py | 54 ++ pydantic_numpy/typing/iii_dimensional.py | 53 ++ pydantic_numpy/typing/n_dimensional.py | 54 ++ .../typing/strict_data_type/__init__.py | 0 .../typing/strict_data_type/i_dimensional.py | 61 +++ .../typing/strict_data_type/ii_dimensional.py | 61 +++ .../strict_data_type/iii_dimensional.py | 60 +++ .../typing/strict_data_type/n_dimensional.py | 51 ++ pydantic_numpy/util.py | 29 ++ pyproject.toml | 29 +- tests/helper/__init__.py | 0 tests/helper/cache.py | 11 + tests/helper/groups.py | 206 ++++++++ tests/test_dtype.py | 68 --- tests/test_model.py | 84 --- tests/test_ndarray.py | 142 ----- tests/test_np_model.py | 96 ++++ tests/test_typing.py | 97 ++++ 34 files changed, 1806 insertions(+), 1038 deletions(-) delete mode 100644 .github/workflows/python-package.yml create mode 100644 .github/workflows/test.yaml delete mode 100644 pydantic_numpy/dtype.py create mode 100644 pydantic_numpy/helper/__init__.py create mode 100644 pydantic_numpy/helper/annotation.py create mode 100644 pydantic_numpy/helper/validation.py delete mode 100644 
pydantic_numpy/model.py create mode 100644 pydantic_numpy/model/__init__.py create mode 100644 pydantic_numpy/model/multi_array.py create mode 100644 pydantic_numpy/model/np_model.py delete mode 100644 pydantic_numpy/ndarray.py create mode 100644 pydantic_numpy/typing/__init__.py create mode 100644 pydantic_numpy/typing/i_dimensional.py create mode 100644 pydantic_numpy/typing/ii_dimensional.py create mode 100644 pydantic_numpy/typing/iii_dimensional.py create mode 100644 pydantic_numpy/typing/n_dimensional.py create mode 100644 pydantic_numpy/typing/strict_data_type/__init__.py create mode 100644 pydantic_numpy/typing/strict_data_type/i_dimensional.py create mode 100644 pydantic_numpy/typing/strict_data_type/ii_dimensional.py create mode 100644 pydantic_numpy/typing/strict_data_type/iii_dimensional.py create mode 100644 pydantic_numpy/typing/strict_data_type/n_dimensional.py create mode 100644 pydantic_numpy/util.py create mode 100644 tests/helper/__init__.py create mode 100644 tests/helper/cache.py create mode 100644 tests/helper/groups.py delete mode 100644 tests/test_dtype.py delete mode 100644 tests/test_model.py delete mode 100644 tests/test_ndarray.py create mode 100644 tests/test_np_model.py create mode 100644 tests/test_typing.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml deleted file mode 100644 index 4ce681c..0000000 --- a/.github/workflows/python-package.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Python package -on: - push: - branches: [ trunk ] - pull_request: - branches: [ trunk ] - -jobs: - build: - strategy: - fail-fast: false - matrix: - python-version: ["3.9", "3.10", "3.11"] - os: [ubuntu-18.04, macos-latest, windows-latest] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Run image - uses: abatilo/actions-poetry@v2 - - name: Install dependencies - run: | - poetry install - - name: Test 
with pytest - run: | - poetry run pytest diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..8f2417c --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,63 @@ +name: Python application test with GitHub Actions + +on: + push: + branches: [ trunk ] + pull_request: + branches: [ trunk ] + +jobs: + unit_tests: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [ "3.9", "3.10", "3.11" ] + + steps: + - uses: actions/checkout@v3 + + - name: Setup the Python Environment ${{ matrix.python-version }} + uses: Qwerty-133/python-setup@v1 + with: + python-version: ${{ matrix.python-version }} + skip-pre-commit: true + + - name: Install dependencies + run: | + poetry install --all-extras --with dev + + - name: Run tests + run: | + poetry run pytest . + + - name: Upload coverage reports to Codecov + uses: codecov/codecov-action@v3 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + mypy: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [ "3.9", "3.10", "3.11" ] + + steps: + - uses: actions/checkout@v3 + + - name: Setup the Python Environment ${{ matrix.python-version }} + uses: Qwerty-133/python-setup@v1 + with: + python-version: ${{ matrix.python-version }} + skip-pre-commit: true + + - name: Install dependencies + run: | + poetry install --all-extras --with dev --with ci + + - name: Validate type-hints with MyPy + run: | + poetry run mypy --ignore-missing-imports \ + --follow-imports=skip \ + --strict-optional \ + -p pydantic_numpy diff --git a/README.md b/README.md index 530002b..0f0a1dc 100644 --- a/README.md +++ b/README.md @@ -1,54 +1,71 @@ # pydantic-numpy -Integrate NumPy into Pydantic, and provide tooling! `NumpyModel` make it possible to dump and load `np.ndarray` within model fields! +Package that integrates NumPy Arrays into Pydantic! 
-### Install -```shell -pip install pydantic-numpy -``` +- `NumpyModel` make it possible to dump and load `np.ndarray` within model fields alongside other fields that are not instances of `np.ndarray`! +- `pydantic_numpy.typing` provides many typings such as `NpNDArrayFp64`, `Np3DArrayFp64` (float64 that must be 3D)! ## Usage -For more examples see [test_ndarray.py](./tests/test_ndarray.py) +For more examples see [test_ndarray.py](./tests/test_typing.py) ```python -import pydantic_numpy.dtype as pnd -from pydantic_numpy import NDArray, NDArrayFp32, NumpyModel +import numpy as np + +import pydantic_numpy.typing as pnd +from pydantic_numpy import np_array_pydantic_annotated_typing +from pydantic_numpy.model import NumpyModel, MultiArrayNumpyFile + +class MyNumpyModel(NumpyModel): + any_array_dtype_and_dimension: pnd.NpNDArray -class MyPydanticNumpyModel(NumpyModel): - K: NDArray[float, pnd.float32] - C: NDArrayFp32 # <- Shorthand for same type as K + # Must be numpy float32 as dtype + k: np_array_pydantic_annotated_typing(data_type=np.float32) + shorthand_for_k: pnd.NpNDArrayFp32 + + must_be_1d_np_array: np_array_pydantic_annotated_typing(dimensions=1) + + +class MyDemoModel(NumpyModel): + k: np_array_pydantic_annotated_typing(data_type=np.float32) # Instantiate from array -cfg = MyPydanticNumpyModel(K=[1, 2]) +cfg = MyDemoModel(k=[1, 2]) # Instantiate from numpy file -cfg = MyPydanticNumpyModel(K={"path": "path_to/array.npy"}) +cfg = MyDemoModel(k="path_to/array.npy") # Instantiate from npz file with key -cfg = MyPydanticNumpyModel(K={"path": "path_to/array.npz", "key": "K"}) +cfg = MyDemoModel(k=MultiArrayNumpyFile(path="path_to/array.npz", key="k")) -cfg.K -# np.ndarray[np.float32] +cfg.k # np.ndarray[np.float32] cfg.dump("path_to_dump_dir", "object_id") cfg.load("path_to_dump_dir", "object_id") ``` -`NumpyModel.load` requires the original mode, use `model_agnostic_load` when you have several models that may be the right model. - -### Data type (dtype) support! 
+`NumpyModel.load` requires the original mode, use `model_agnostic_load` when you have several models that may be the right model: +```python +from pydantic_numpy.model.np_model import model_agnostic_load -This package also comes with `pydantic_numpy.dtype`, which adds subtyping support such as `NDArray[float, pnd.float32]`. All subfields must be from this package as numpy dtypes have no Pydantic support, which is implemented in this package through the [generic class workflow](https://pydantic-docs.helpmanual.io/usage/types/#generic-classes-as-types). +cfg.dump("path_to_dump_dir", "object_id") +equals_cfg = model_agnostic_load("path_to_dump_dir", "object_id", models=[MyNumpyModel, MyDemoModel]) +``` -## Considerations +### Data type (dtype) support! -The `NDArray` class from `pydantic-numpy` is daughter of `np.ndarray`. IDEs and linters might complain that you are passing an incorrect `type` to a model. The only solution is to merge these change into `numpy`. +This package also comes with `pydantic_numpy.dtype`, which adds subtyping support such as `NpNDArray[float, pnd.float32]`. All subfields must be from this package as numpy dtypes have no Pydantic support, which is implemented in this package through the [generic class workflow](https://pydantic-docs.helpmanual.io/usage/types/#generic-classes-as-types). -You can also use the `typings` in `pydantic.validate_arguments`. +### Install +```shell +pip install pydantic-numpy +``` -You can install from [cheind's](https://github.com/cheind/pydantic-numpy) repository if you want Python `3.8` support. +## Considerations +You can install from [cheind's](https://github.com/cheind/pydantic-numpy) repository if you want Python `3.8` support, but this version only support Pydantic V1 and will not work with V2. -## History +### Licensing notice +As of version `3.0.0` the license has moved over to BSD-4. The versions prior are under the MIT license. 
-The original idea originates from [this discussion](https://gist.github.com/danielhfrank/00e6b8556eed73fb4053450e602d2434), and forked from [cheind's](https://github.com/cheind/pydantic-numpy) repository. +### History +The original idea originates from [this discussion](https://gist.github.com/danielhfrank/00e6b8556eed73fb4053450e602d2434), and forked from [cheind's](https://github.com/cheind/pydantic-numpy) repository. \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 475d3d8..f2c86a6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,75 +1,33 @@ # This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] -name = "black" -version = "23.7.0" -description = "The uncompromising code formatter." +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" optional = false -python-versions = ">=3.8" -files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = 
"black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = 
"sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "cfgv" -version = "3.3.1" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" files = [ - {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, - {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, ] [[package]] -name = "click" -version = "8.1.5" -description = "Composable command line interface toolkit" +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, - {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = 
"attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "colorama" @@ -102,17 +60,6 @@ dill = ["dill"] full = ["cloudpickle", "dill", "lz4"] lz4 = ["lz4"] -[[package]] -name = "distlib" -version = "0.3.7" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, -] - [[package]] name = "exceptiongroup" version = "1.1.2" @@ -128,33 +75,36 @@ files = [ test = ["pytest (>=6)"] [[package]] -name = "filelock" -version = "3.12.2" -description = "A platform independent file lock." 
+name = "hypothesis" +version = "6.82.0" +description = "A library for property-based testing" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "hypothesis-6.82.0-py3-none-any.whl", hash = "sha256:fa8eee429b99f7d3c953fb2b57de415fd39b472b09328b86c1978f12669ef395"}, + {file = "hypothesis-6.82.0.tar.gz", hash = "sha256:ffece8e40a34329e7112f7408f2c45fe587761978fdbc6f4f91bf0d683a7d4d9"}, ] -[package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "identify" -version = "2.5.24" -description = "File identification library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, - {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, -] +[package.dependencies] +attrs = ">=19.2.0" +exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] -license = ["ukkonen"] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2023.3)"] +cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] +codemods = ["libcst 
(>=0.3.16)"] +dateutil = ["python-dateutil (>=1.4)"] +django = ["django (>=3.2)"] +dpcontracts = ["dpcontracts (>=0.4)"] +ghostwriter = ["black (>=19.10b0)"] +lark = ["lark (>=0.10.1)"] +numpy = ["numpy (>=1.17.3)"] +pandas = ["pandas (>=1.1)"] +pytest = ["pytest (>=4.6)"] +pytz = ["pytz (>=2014.1)"] +redis = ["redis (>=3.0.0)"] +zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.3)"] [[package]] name = "iniconfig" @@ -167,23 +117,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "isort" -version = "5.12.0" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - [[package]] name = "lz4" version = "4.3.2" @@ -233,6 +166,52 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] +[[package]] +name = "mypy" +version = "1.4.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = 
"mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -244,20 +223,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "nodeenv" -version = "1.8.0" -description = "Node.js virtual environment builder" -optional = 
false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, -] - -[package.dependencies] -setuptools = "*" - [[package]] name = "numpy" version = "1.25.1" @@ -304,30 +269,18 @@ files = [ ] [[package]] -name = "pathspec" -version = "0.11.1" -description = "Utility library for gitignore style pattern matching of file paths." +name = "parameterized" +version = "0.9.0" +description = "Parameterized testing with any Python test framework" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, -] - -[[package]] -name = "platformdirs" -version = "3.9.1" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, - {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, + {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"}, + {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] +dev = ["jinja2"] [[package]] name = "pluggy" @@ -345,74 +298,136 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "pre-commit" -version = "3.3.3" -description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+name = "pydantic" +version = "2.0.3" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, + {file = "pydantic-2.0.3-py3-none-any.whl", hash = "sha256:614eb3321eb600c81899a88fa9858b008e3c79e0d4f1b49ab1f516b4b0c27cfb"}, + {file = "pydantic-2.0.3.tar.gz", hash = "sha256:94f13e0dcf139a5125e88283fc999788d894e14ed90cf478bcc2ee50bd4fc630"}, ] [package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.3.0" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] [[package]] -name = "pydantic" -version = "1.10.11" -description = "Data validation and settings management using python type hints" +name = "pydantic-core" +version = "2.3.0" +description = "" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, - {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, - {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, - {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, - {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, - {file 
= "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, - {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, - {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, - {file = 
"pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, - {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, - {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, - {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, + {file = "pydantic_core-2.3.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:4542c98b8364b976593703a2dda97377433b102f380b61bc3a2cbc2fbdae1d1f"}, + {file = "pydantic_core-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9342de50824b40f55d2600f66c6f9a91a3a24851eca39145a749a3dc804ee599"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:539432f911686cb80284c30b33eaf9f4fd9a11e1111fe0dc98fdbdce69b49821"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38a0e7ee65c8999394d92d9c724434cb629279d19844f2b69d9bbc46dc8b8b61"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_armv7l.whl", hash = "sha256:e3ed6834cc005798187a56c248a2240207cb8ffdda1c89e9afda4c3d526c2ea0"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:e72ac299a6bf732a60852d052acf3999d234686755a02ba111e85e7ebf8155b1"}, + {file = "pydantic_core-2.3.0-cp310-cp310-manylinux_2_24_s390x.whl", hash = "sha256:616b3451b05ca63b8f433c627f68046b39543faeaa4e50d8c6699a2a1e4b85a5"}, + {file = 
"pydantic_core-2.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:adcb9c8848e15c613e483e0b99767ae325af27fe0dbd866df01fe5849d06e6e1"}, + {file = "pydantic_core-2.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:464bf799b422be662e5e562e62beeffc9eaa907d381a9d63a2556615bbda286d"}, + {file = "pydantic_core-2.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4638ebc17de08c2f3acba557efeb6f195c88b7299d8c55c0bb4e20638bbd4d03"}, + {file = "pydantic_core-2.3.0-cp310-none-win32.whl", hash = "sha256:9ff322c7e1030543d35d83bb521b69114d3d150750528d7757544f639def9ad6"}, + {file = "pydantic_core-2.3.0-cp310-none-win_amd64.whl", hash = "sha256:4824eb018f0a4680b1e434697a9bf3f41c7799b80076d06530cbbd212e040ccc"}, + {file = "pydantic_core-2.3.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:0aa429578e23885b3984c49d687cd05ab06f0b908ea1711a8bf7e503b7f97160"}, + {file = "pydantic_core-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20d710c1f79af930b8891bcebd84096798e4387ab64023ef41521d58f21277d3"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:309f45d4d7481d6f09cb9e35c72caa0e50add4a30bb08c04c5fe5956a0158633"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bcfb7be905aa849bd882262e1df3f75b564e2f708b4b4c7ad2d3deaf5410562"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_armv7l.whl", hash = "sha256:85cd9c0af34e371390e3cb2f3a470b0b40cc07568c1e966c638c49062be6352d"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_ppc64le.whl", hash = "sha256:37c5028cebdf731298724070838fb3a71ef1fbd201d193d311ac2cbdbca25a23"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_24_s390x.whl", hash = "sha256:e4208f23f12d0ad206a07a489ef4cb15722c10b62774c4460ee4123250be938e"}, + {file = "pydantic_core-2.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:c24465dd11b65c8510f251b095fc788c7c91481c81840112fe3f76c30793a455"}, + {file = "pydantic_core-2.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3cd7ee8bbfab277ab56e272221886fd33a1b5943fbf45ae9195aa6a48715a8a0"}, + {file = "pydantic_core-2.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0fc7e0b056b66cc536e97ef60f48b3b289f6b3b62ac225afd4b22a42434617bf"}, + {file = "pydantic_core-2.3.0-cp311-none-win32.whl", hash = "sha256:4788135db4bd83a5edc3522b11544b013be7d25b74b155e08dd3b20cd6663bbb"}, + {file = "pydantic_core-2.3.0-cp311-none-win_amd64.whl", hash = "sha256:f93c867e5e85584a28c6a6feb6f2086d717266eb5d1210d096dd717b7f4dec04"}, + {file = "pydantic_core-2.3.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:73f62bb7fd862d9bcd886e10612bade6fe042eda8b47e8c129892bcfb7b45e84"}, + {file = "pydantic_core-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d889d498fce64bfcd8adf1a78579a7f626f825cbeb2956a24a29b35f9a1df32"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d55e38a89ec2ae17b2fa7ffeda6b70f63afab1888bd0d57aaa7b7879760acb4"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aefebb506bc1fe355d91d25f12bcdea7f4d7c2d9f0f6716dd025543777c99a5"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_armv7l.whl", hash = "sha256:6441a29f42585f085db0c04cd0557d4cbbb46fa68a0972409b1cfe9f430280c1"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_ppc64le.whl", hash = "sha256:47e8f034be31390a8f525431eb5e803a78ce7e2e11b32abf5361a972e14e6b61"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_24_s390x.whl", hash = "sha256:ad814864aba263be9c83ada44a95f72d10caabbf91589321f95c29c902bdcff0"}, + {file = "pydantic_core-2.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9eff3837d447fccf2ac38c259b14ab9cbde700df355a45a1f3ff244d5e78f8b6"}, + {file = 
"pydantic_core-2.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:534f3f63c000f08050c6f7f4378bf2b52d7ba9214e9d35e3f60f7ad24a4d6425"}, + {file = "pydantic_core-2.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ef6a222d54f742c24f6b143aab088702db3a827b224e75b9dd28b38597c595fe"}, + {file = "pydantic_core-2.3.0-cp312-none-win32.whl", hash = "sha256:4e26944e64ecc1d7b19db954c0f7b471f3b141ec8e1a9f57cfe27671525cd248"}, + {file = "pydantic_core-2.3.0-cp312-none-win_amd64.whl", hash = "sha256:019c5c41941438570dfc7d3f0ae389b2425add1775a357ce1e83ed1434f943d6"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:27c1bbfb9d84a75cf33b7f19b53c29eb7ead99b235fce52aced5507174ab8f98"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:7cb496e934b71f1ade844ab91d6ccac78a3520e5df02fdb2357f85a71e541e69"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af2d43b1978958d91351afbcc9b4d0cfe144c46c61740e82aaac8bb39ab1a4d"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3097c39d7d4e8dba2ef86de171dcccad876c36d8379415ba18a5a4d0533510"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_armv7l.whl", hash = "sha256:dd3b023f3317dbbbc775e43651ce1a31a9cea46216ad0b5be37afc18a2007699"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:27babb9879bf2c45ed655d02639f4c30e2b9ef1b71ce59c2305bbf7287910a18"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_24_s390x.whl", hash = "sha256:2183a9e18cdc0de53bdaa1675f237259162abeb62d6ac9e527c359c1074dc55d"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c089d8e7f1b4db08b2f8e4107304eec338df046275dad432635a9be9531e2fc8"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:2f10aa5452b865818dd0137f568d443f5e93b60a27080a01aa4b7512c7ba13a3"}, + {file = "pydantic_core-2.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f642313d559f9d9a00c4de6820124059cc3342a0d0127b18301de2c680d5ea40"}, + {file = "pydantic_core-2.3.0-cp37-none-win32.whl", hash = "sha256:45327fc57afbe3f2c3d7f54a335d5cecee8a9fdb3906a2fbed8af4092f4926df"}, + {file = "pydantic_core-2.3.0-cp37-none-win_amd64.whl", hash = "sha256:e427b66596a6441a5607dfc0085b47d36073f88da7ac48afd284263b9b99e6ce"}, + {file = "pydantic_core-2.3.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:0b3d781c71b8bfb621ef23b9c874933e2cd33237c1a65cc20eeb37437f8e7e18"}, + {file = "pydantic_core-2.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad46027dbd5c1db87dc0b49becbe23093b143a20302028d387dae37ee5ef95f5"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39aa09ed7ce2a648c904f79032d16dda29e6913112af8465a7bf710eef23c7ca"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b4bf8c58409586a7a04c858a86ab10f28c6c1a7c33da65e0326c59d5b0ab16"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_armv7l.whl", hash = "sha256:ba2b807d2b62c446120906b8580cddae1d76d3de4efbb95ccc87f5e35c75b4b2"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:ea955e4ed21f4bbb9b83fea09fc6af0bed82e69ecf6b35ec89237a0a49633033"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_24_s390x.whl", hash = "sha256:06884c07956526ac9ebfef40fe21a11605569b8fc0e2054a375fb39c978bf48f"}, + {file = "pydantic_core-2.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f868e731a18b403b88aa434d960489ceeed0ddeb44ebc02389540731a67705e0"}, + {file = "pydantic_core-2.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cb08fab0fc1db15c277b72e33ac74ad9c0c789413da8984a3eacb22a94b42ef4"}, + {file = "pydantic_core-2.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:6ca34c29fbd6592de5fd39e80c1993634d704c4e7e14ba54c87b2c7c53da68fe"}, + {file = "pydantic_core-2.3.0-cp38-none-win32.whl", hash = "sha256:cd782807d35c8a41aaa7d30b5107784420eefd9fdc1c760d86007d43ae00b15d"}, + {file = "pydantic_core-2.3.0-cp38-none-win_amd64.whl", hash = "sha256:01f56d5ee70b1d39c0fd08372cc5142274070ab7181d17c86035f130eebc05b8"}, + {file = "pydantic_core-2.3.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:78b1ac0151271ce62bc2b33755f1043eda6a310373143a2f27e2bcd3d5fc8633"}, + {file = "pydantic_core-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:64bfd2c35a2c350f73ac52dc134d8775f93359c4c969280a6fe5301b5b6e7431"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:937c0fe9538f1212b62df6a68f8d78df3572fe3682d9a0dd8851eac8a4e46063"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d965c7c4b40d1cedec9188782e98bd576f9a04868835604200c3a6e817b824f"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_armv7l.whl", hash = "sha256:ad442b8585ed4a3c2d22e4bf7b465d9b7d281e055b09719a8aeb5b576422dc9b"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:4bf20c9722821fce766e685718e739deeccc60d6bc7be5029281db41f999ee0c"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_24_s390x.whl", hash = "sha256:f3dd5333049b5b3faa739e0f40b77cc8b7a1aded2f2da0e28794c81586d7b08a"}, + {file = "pydantic_core-2.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dc5f516b24d24bc9e8dd9305460899f38302b3c4f9752663b396ef9848557bf"}, + {file = "pydantic_core-2.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:055f7ea6b1fbb37880d66d70eefd22dd319b09c79d2cb99b1dbfeb34b653b0b2"}, + {file = "pydantic_core-2.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:af693a89db6d6ac97dd84dd7769b3f2bd9007b578127d0e7dda03053f4d3b34b"}, + {file = "pydantic_core-2.3.0-cp39-none-win32.whl", hash = 
"sha256:f60e31e3e15e8c294bf70c60f8ae4d0c3caf3af8f26466e9aa8ea4c01302749b"}, + {file = "pydantic_core-2.3.0-cp39-none-win_amd64.whl", hash = "sha256:2b79f3681481f4424d7845cc7a261d5a4baa810d656b631fa844dc9967b36a7b"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:a666134b41712e30a71afaa26deeb4da374179f769fa49784cdf0e7698880fab"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c119e9227487ad3d7c3c737d896afe548a6be554091f9745da1f4b489c40561"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73929a2fb600a2333fce2efd92596cff5e6bf8946e20e93c067b220760064862"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:41bbc2678a5b6a19371b2cb51f30ccea71f0c14b26477d2d884fed761cea42c7"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dcbff997f47d45bf028bda4c3036bb3101e89a3df271281d392b6175f71c71d1"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:afa8808159169368b66e4fbeafac6c6fd8f26246dc4d0dcc2caf94bd9cf1b828"}, + {file = "pydantic_core-2.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12be3b5f54f8111ca38e6b7277f26c23ba5cb3344fae06f879a0a93dfc8b479e"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ed5babdcd3d052ba5cf8832561f18df20778c7ccf12587b2d82f7bf3bf259a0e"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d642e5c029e2acfacf6aa0a7a3e822086b3b777c70d364742561f9ca64c1ffc"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba3073eb38a1294e8c7902989fb80a7a147a69db2396818722bd078476586a0"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d5146a6749b1905e04e62e0ad4622f079e5582f8b3abef5fb64516c623127908"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:deeb64335f489c3c11949cbd1d1668b3f1fb2d1c6a5bf40e126ef7bf95f9fa40"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:31acc37288b8e69e4849f618c3d5cf13b58077c1a1ff9ade0b3065ba974cd385"}, + {file = "pydantic_core-2.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e09d9f6d722de9d4c1c5f122ea9bc6b25a05f975457805af4dcab7b0128aacbf"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ba6a8cf089222a171b8f84e6ec2d10f7a9d14f26be3a347b14775a8741810676"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1fd1b24e9bcddcb168437686677104e205c8e25b066e73ffdf331d3bb8792b"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eda1a89c4526826c0a87d33596a4cd15b8f58e9250f503e39af1699ba9c878e8"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3e9a18401a28db4358da2e191508702dbf065f2664c710708cdf9552b9fa50c"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a439fd0d45d51245bbde799726adda5bd18aed3fa2b01ab2e6a64d6d13776fa3"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:bf6a1d2c920cc9528e884850a4b2ee7629e3d362d5c44c66526d4097bbb07a1a"}, + {file = "pydantic_core-2.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e33fcbea3b63a339dd94de0fc442fefacfe681cc7027ce63f67af9f7ceec7422"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:bf3ed993bdf4754909f175ff348cf8f78d4451215b8aa338633f149ca3b1f37a"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7584171eb3115acd4aba699bc836634783f5bd5aab131e88d8eeb8a3328a4a72"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1624baa76d1740711b2048f302ae9a6d73d277c55a8c3e88b53b773ebf73a971"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:06f33f695527f5a86e090f208978f9fd252c9cfc7e869d3b679bd71f7cb2c1fa"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7ecf0a67b212900e92f328181fed02840d74ed39553cdb38d27314e2b9c89dfa"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:45fa1e8ad6f4367ad73674ca560da8e827cc890eaf371f3ee063d6d7366a207b"}, + {file = "pydantic_core-2.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8d0dbcc57839831ae79fd24b1b83d42bc9448d79feaf3ed3fb5cbf94ffbf3eb7"}, + {file = "pydantic_core-2.3.0.tar.gz", hash = "sha256:5cfb5ac4e82c47d5dc25b209dd4c3989e284b80109f9e08b33c895080c424b4f"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pytest" @@ -436,55 +451,6 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - [[package]] name = "ruamel-yaml" version = "0.17.32" @@ -564,6 +530,17 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-g testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", 
"jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -586,27 +563,7 @@ files = [ {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] -[[package]] -name = "virtualenv" -version = "20.24.0" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.24.0-py3-none-any.whl", hash = "sha256:18d1b37fc75cc2670625702d76849a91ebd383768b4e91382a8d51be3246049e"}, - {file = "virtualenv-20.24.0.tar.gz", hash = "sha256:e2a7cef9da880d693b933db7654367754f14e20650dc60e8ee7385571f8593a3"}, -] - -[package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.12,<4" -platformdirs = ">=3.5.1,<4" - -[package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky 
(>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] - [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.12" -content-hash = "3b14d546d76e50c96e89ac3ed5580255e01cfa8e77881dc030f72e51772fd058" +content-hash = "bcc82a64dd2cb318c5325bb80892f2dd847fe8451978598855abfb05d355d1a5" diff --git a/pydantic_numpy/__init__.py b/pydantic_numpy/__init__.py index 9dd9a54..196b37e 100644 --- a/pydantic_numpy/__init__.py +++ b/pydantic_numpy/__init__.py @@ -1,3 +1,2 @@ -from pydantic_numpy.dtype import * # noqa: F403 -from pydantic_numpy.model import NumpyModel, model_agnostic_load # noqa: F401 -from pydantic_numpy.ndarray import NDArray, NPFileDesc, PotentialNDArray # noqa: F401 +from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing +from pydantic_numpy.typing.n_dimensional import NpNDArray diff --git a/pydantic_numpy/dtype.py b/pydantic_numpy/dtype.py deleted file mode 100644 index df5d8be..0000000 --- a/pydantic_numpy/dtype.py +++ /dev/null @@ -1,167 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, TypeVar - -import numpy as np -from pydantic import ValidationError -from pydantic.fields import ModelField - -from pydantic_numpy.ndarray import NDArray - -if TYPE_CHECKING: - from pydantic.typing import CallableGenerator - - -class _BaseDType(object): - @classmethod - def __modify_schema__(cls, field_schema: dict[str, Any]) -> None: - field_schema.update({"type": cls.__name__}) - - @classmethod - def __get_validators__(cls) -> CallableGenerator: - yield cls.validate - - @classmethod - def validate(cls, val: Any, field: ModelField) -> "PdNpDtype": - if field.sub_fields: - msg = f"{cls.__name__} has no subfields" - raise ValidationError(msg) - if not isinstance(val, cls): - return cls(val) - return val - - -PdNpDtype = TypeVar("PdNpDtype", 
bound=_BaseDType) - - -class longdouble(np.longdouble, _BaseDType): - pass - - -float128 = longdouble - - -class double(np.double, _BaseDType): - pass - - -float64 = double - - -class single(np.single, _BaseDType): - pass - - -float32 = single - - -class half(np.half, _BaseDType): - pass - - -float16 = half - - -class int_(np.int_, _BaseDType): - pass - - -int64 = int_ - - -class intc(np.intc, _BaseDType): - pass - - -int32 = intc - - -class short(np.short, _BaseDType): - pass - - -int16 = short - - -class byte(np.byte, _BaseDType): - pass - - -int8 = byte - - -class uint(np.uint, _BaseDType): - pass - - -uint64 = uint - - -class uintc(np.uintc, _BaseDType): - pass - - -uint32 = uintc - - -class ushort(np.ushort, _BaseDType): - pass - - -uint16 = ushort - - -class ubyte(np.ubyte, _BaseDType): - pass - - -uint8 = ubyte - - -class clongdouble(np.clongdouble, _BaseDType): - pass - - -complex256 = clongdouble - - -class cdouble(np.cdouble, _BaseDType): - pass - - -complex128 = cdouble - - -class csingle(np.csingle, _BaseDType): - pass - - -complex64 = csingle - - -class npbool(np.bool_, _BaseDType): - pass - - -# NDArray typings - -NDArrayInt64 = NDArray[int, int64] -NDArrayInt32 = NDArray[int, int32] -NDArrayInt16 = NDArray[int, int16] -NDArrayInt8 = NDArray[int, int8] - -NDArrayUint64 = NDArray[int, uint64] -NDArrayUint32 = NDArray[int, uint32] -NDArrayUint16 = NDArray[int, uint16] -NDArrayUint8 = NDArray[int, uint8] - -NDArrayFp128 = NDArray[float, float128] -NDArrayFp64 = NDArray[float, float64] -NDArrayFp32 = NDArray[float, float32] -NDArrayFp16 = NDArray[float, float16] - -NDArrayComplex256 = NDArray[float, complex256] -NDArrayComplex128 = NDArray[float, complex128] -NDArrayComplex64 = NDArray[float, complex64] - -NDArrayBool = NDArray[bool, npbool] diff --git a/pydantic_numpy/helper/__init__.py b/pydantic_numpy/helper/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pydantic_numpy/helper/annotation.py b/pydantic_numpy/helper/annotation.py new 
file mode 100644 index 0000000..d495771 --- /dev/null +++ b/pydantic_numpy/helper/annotation.py @@ -0,0 +1,155 @@ +from collections.abc import Sequence +from pathlib import Path +from typing import Any, Callable, ClassVar, Optional, Union + +import numpy as np +import numpy.typing as npt +from numpy.lib.npyio import NpzFile +from numpy.typing import DTypeLike +from pydantic import FilePath, GetJsonSchemaHandler, PositiveInt +from pydantic.json_schema import JsonSchemaValue +from pydantic_core import core_schema +from typing_extensions import Annotated + +from pydantic_numpy.helper.validation import ( + create_array_validator, + validate_multi_array_numpy_file, + validate_numpy_array_file, +) +from pydantic_numpy.model.multi_array import MultiArrayNumpyFile + + +class NpArrayPydanticAnnotation: + dimensions: ClassVar[Optional[PositiveInt]] + + data_type: ClassVar[DTypeLike] + strict_data_typing: ClassVar[bool] + + @classmethod + def factory( + cls, *, data_type: DTypeLike, dimensions: Optional[int] = None, strict_data_typing: bool = False + ) -> type: + """ + Create an instance NpArrayPydanticAnnotation that is configured for a specific dimension and dtype. + + The signature of the function is data_type, dimension and not dimension, data_type to reduce amount of + code for all the types. + + Parameters + ---------- + data_type: DTypeLike + dimensions: Optional[int] + Number of dimensions determine the depth of the numpy array. + strict_data_typing: bool + If True, the dtype of the numpy array must be identical to the data_type. No conversion attempts. 
+ + Returns + ------- + NpArrayPydanticAnnotation + """ + if strict_data_typing and not data_type: + msg = "Strict data typing requires data_type (DTypeLike) definition" + raise ValueError(msg) + + return type( + ( + f"Np{'Strict' if strict_data_typing else ''}{dimensions or 'N'}DArray" + f"{data_type.__name__.capitalize() if data_type else ''}PydanticAnnotation" + ), + (cls,), + {"dimensions": dimensions, "data_type": data_type, "strict_data_typing": strict_data_typing}, + ) + + @classmethod + def __get_pydantic_core_schema__( + cls, + _source_type: Any, + _handler: Callable[[Any], core_schema.CoreSchema], + ) -> core_schema.CoreSchema: + np_array_validator = create_array_validator(cls.dimensions, cls.data_type, cls.strict_data_typing) + np_array_schema = core_schema.no_info_plain_validator_function(np_array_validator) + + return core_schema.json_or_python_schema( + python_schema=core_schema.chain_schema([_common_numpy_array_validator, np_array_schema]), + json_schema=np_array_schema, + serialization=core_schema.plain_serializer_function_ser_schema( + lambda arr: np.array2string(arr), when_used="json" + ), + ) + + @classmethod + def __get_pydantic_json_schema__( + cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + return handler( + dict( + type=( + f"np.ndarray[{_int_to_dim_type[cls.dimensions] if cls.dimensions else 'Any'}, " + f"{np.dtype[cls.data_type.__name__] if _data_type_resolver(cls.data_type) else cls.data_type}]" # type: ignore[name-defined] + ), + strict_data_typing=cls.strict_data_typing, + ) + ) + + +def np_array_pydantic_annotated_typing( + data_type: DTypeLike = None, dimensions: Optional[int] = None, strict_data_typing: bool = False +): + """ + Generates typing and pydantic annotation of a np.ndarray parametrized with given constraints + + Parameters + ---------- + data_type: DTypeLike + dimensions: Optional[int] + Number of dimensions determine the depth of the numpy array. 
+ strict_data_typing: bool + If True, the dtype of the numpy array must be identical to the data_type. No conversion attempts. + + Returns + ------- + type-hint for np.ndarray with Pydantic support + """ + return Annotated[ + Union[ + FilePath, + MultiArrayNumpyFile, + np.ndarray[ # type: ignore[misc] + _int_to_dim_type[dimensions] if dimensions else Any, + np.dtype[data_type] if _data_type_resolver(data_type) else data_type, + ], + ], + NpArrayPydanticAnnotation.factory( + data_type=data_type, dimensions=dimensions, strict_data_typing=strict_data_typing + ), + ] + + +def _data_type_resolver(data_type: DTypeLike): + return data_type is not None and issubclass(data_type, np.generic) + + +_int_to_dim_type = {1: tuple[int], 2: tuple[int, int], 3: tuple[int, int, int]} +_common_numpy_array_validator = core_schema.union_schema( + [ + core_schema.chain_schema( + [ + core_schema.is_instance_schema(Path), + core_schema.no_info_plain_validator_function(validate_numpy_array_file), + ] + ), + core_schema.chain_schema( + [ + core_schema.is_instance_schema(MultiArrayNumpyFile), + core_schema.no_info_plain_validator_function(validate_multi_array_numpy_file), + ] + ), + core_schema.is_instance_schema(np.ndarray), + core_schema.chain_schema( + [ + core_schema.is_instance_schema(Sequence), + core_schema.no_info_plain_validator_function(lambda v: np.asarray(v)), + ] + ), + ] +) diff --git a/pydantic_numpy/helper/validation.py b/pydantic_numpy/helper/validation.py new file mode 100644 index 0000000..2d63af3 --- /dev/null +++ b/pydantic_numpy/helper/validation.py @@ -0,0 +1,123 @@ +from typing import Callable, Optional + +import numpy as np +import numpy.typing as npt +from numpy import floating, integer +from numpy.lib.npyio import NpzFile +from pydantic import FilePath + +from pydantic_numpy.model.multi_array import MultiArrayNumpyFile + + +class PydanticNumpyMultiArrayNumpyFileOnFilePath(Exception): + pass + + +def create_array_validator( + dimensions: Optional[int], 
target_data_type: npt.DTypeLike, strict_data_typing: bool +) -> Callable[[npt.NDArray], npt.NDArray]: + """ + Creates a validator that ensures the numpy array has the defined dimensions and dtype (data_type). + + Parameters + ---------- + dimensions: int | None + Default to None; if set to an integer, enforce the dimension of the numpy array to that integer + target_data_type: DTypeLike + The data type the array must have after validation, arrays with different data types will be converted + during validation. Float to integer is rounded (np.round) followed by an astype with target data type. + strict_data_typing: bool + Default False; if True, the incoming array must its dtype match the target_data_type. Strict mode. + + Returns + ------- + Callable[[npt.NDArray], npt.NDArray] + Validator for numpy array + """ + + def array_validator(array: npt.NDArray) -> npt.NDArray: + if dimensions and (array_dimensions := len(array.shape)) != dimensions: + msg = f"Array {array_dimensions}-dimensional; the target dimensions is {dimensions}" + raise ValueError(msg) + + if target_data_type and array.dtype.type != target_data_type: + if strict_data_typing: + msg = f"The data_type {array.dtype.type} does not coincide with type hint; {target_data_type}" + raise ValueError(msg) + + if issubclass(_resolve_type_of_array_dtype(target_data_type), integer) and issubclass( + _resolve_type_of_array_dtype(array.dtype), floating + ): + array = np.round(array).astype(target_data_type, copy=False) + else: + array = array.astype(target_data_type, copy=True) + + return array + + return array_validator + + +def validate_numpy_array_file(v: FilePath) -> npt.NDArray: + """ + Validate file path to numpy file by loading and return the respective numpy array + + Parameters + ---------- + v: FilePath + Path to the numpy file + + Returns + ------- + NDArray + """ + result = np.load(v) + + if isinstance(result, NpzFile): + files = result.files + if len(files) > 1: + msg = ( + f"The provided file path is a 
multi array NpzFile, which is not supported; "
+                f"convert to single array NpzFiles.\n"
+                f"Path to multi array file: {result}\n"
+                f"Array keys: {', '.join(result.files)}\n"
+                f"Use pydantic_numpy.{MultiArrayNumpyFile.__name__} instead of a PathLike alone"
+            )
+            raise PydanticNumpyMultiArrayNumpyFileOnFilePath(msg)
+        result = result[files[0]]
+
+    return result
+
+
+def validate_multi_array_numpy_file(v: MultiArrayNumpyFile) -> npt.NDArray:
+    """
+    Validation function for loading numpy array from a name mapping numpy file
+
+    Parameters
+    ----------
+    v: MultiArrayNumpyFile
+        MultiArrayNumpyFile to load
+
+    Returns
+    -------
+    NDArray from MultiArrayNumpyFile
+    """
+    return v.load()
+
+
+def _resolve_type_of_array_dtype(array_dtype: npt.DTypeLike) -> type:
+    """
+    np.dtype have the type stored in the type attribute, function to extract that type.
+    If the DTypeLike isn't np.dtype we just return what is already a type.
+
+    Parameters
+    ----------
+    array_dtype: DTypeLike
+
+    Returns
+    -------
+    type
+    """
+    if hasattr(array_dtype, "type"):
+        return array_dtype.type
+    else:
+        return array_dtype
diff --git a/pydantic_numpy/model.py b/pydantic_numpy/model.py
deleted file mode 100644
index 05b8167..0000000
--- a/pydantic_numpy/model.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import pickle as pickle_pkg
-from pathlib import Path
-from typing import (
-    Any,
-    Callable,
-    ClassVar,
-    Dict,
-    Iterable,
-    Optional,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-
-import compress_pickle
-import numpy as np
-from pydantic import BaseModel, DirectoryPath, FilePath, validate_arguments
-from ruamel.yaml import YAML
-
-yaml = YAML()
-
-
-class NumpyModel(BaseModel):
-    _dump_compression: ClassVar[str] = "lz4"
-    _dump_numpy_savez_file_name: ClassVar[str] = "arrays.npz"
-    _dump_non_array_file_stem: ClassVar[str] = "object_info"
-
-    _directory_suffix: ClassVar[str] = ".pdnp"
-
-    @classmethod
-    @validate_arguments
-    def model_directory_path(cls, output_directory: DirectoryPath, 
object_id: str) -> DirectoryPath: - return output_directory / f"{object_id}.{cls.__name__}{cls._directory_suffix}" - - def dump( - self, output_directory: Path, object_id: str, compress: bool = True, pickle: bool = False - ) -> DirectoryPath: - assert not self.__config__.arbitrary_types_allowed or ( - self.__config__.arbitrary_types_allowed and pickle - ), "Arbitrary types are only supported in pickle mode" - - dump_directory_path = self.model_directory_path(output_directory, object_id) - dump_directory_path.mkdir(parents=True, exist_ok=True) - - ndarray_field_to_array, other_field_to_value = self._dump_numpy_split_dict() - - if ndarray_field_to_array: - (np.savez_compressed if compress else np.savez)( - dump_directory_path / self._dump_numpy_savez_file_name, **ndarray_field_to_array - ) - - if other_field_to_value: - if pickle: - if compress: - compress_pickle.dump( - other_field_to_value, - dump_directory_path / self._dump_compressed_pickle_file_name, - compression=self._dump_compression, - ) - else: - with open(dump_directory_path / self._dump_pickle_file_name, "wb") as out_pickle: - pickle_pkg.dump(other_field_to_value, out_pickle) - - else: - with open(dump_directory_path / self._dump_non_array_yaml_name, "w") as out_yaml: - yaml.dump(other_field_to_value, out_yaml) - - return dump_directory_path - - @classmethod - def load( - cls, - output_directory: DirectoryPath, - object_id: str, - pre_load_modifier: Optional[Callable[[dict[str, Any]], dict[str, Any]]] = None, - ) -> "NumpyModelVar": - object_directory_path = cls.model_directory_path(output_directory, object_id) - - npz_file = np.load(object_directory_path / cls._dump_numpy_savez_file_name) - - other_path: FilePath - if (other_path := object_directory_path / cls._dump_compressed_pickle_file_name).exists(): - other_field_to_value = compress_pickle.load(other_path) - elif (other_path := object_directory_path / cls._dump_pickle_file_name).exists(): - with open(other_path, "rb") as in_pickle: - 
other_field_to_value = pickle_pkg.load(in_pickle) - elif (other_path := object_directory_path / cls._dump_non_array_yaml_name).exists(): - with open(other_path, "r") as in_yaml: - other_field_to_value = yaml.load(in_yaml) - else: - other_field_to_value = {} - - field_to_value = {**npz_file, **other_field_to_value} - if pre_load_modifier: - field_to_value = pre_load_modifier(field_to_value) - - return cls(**field_to_value) - - def _dump_numpy_split_dict(self) -> Tuple[Dict, Dict]: - ndarray_field_to_array, other_field_to_value = {}, {} - for k, v in self.dict(exclude_unset=True).items(): - if isinstance(v, np.ndarray): - ndarray_field_to_array[k] = v - else: - other_field_to_value[k] = v - - return ndarray_field_to_array, other_field_to_value - - @classmethod - @property - def _dump_compressed_pickle_file_name(cls) -> str: - return f"{cls._dump_non_array_file_stem}.pickle.{cls._dump_compression}" - - @classmethod - @property - def _dump_pickle_file_name(cls) -> str: - return f"{cls._dump_non_array_file_stem}.pickle" - - @classmethod - @property - def _dump_non_array_yaml_name(cls) -> str: - return f"{cls._dump_non_array_file_stem}.yaml" - - -NumpyModelVar = TypeVar("NumpyModelVar", bound=NumpyModel) -NumpyModel.update_forward_refs(NumpyModelVar=NumpyModelVar) - - -NumpyModelCLS = Type[NumpyModel] - - -def model_agnostic_load( - output_directory: DirectoryPath, - object_id: str, - models: Iterable[NumpyModelCLS], - not_found_error: bool = False, - **load_kwargs, -) -> Union[NumpyModelVar, None]: - for model in models: - if model.model_directory_path(output_directory, object_id).exists(): - return model.load(output_directory, object_id, **load_kwargs) - if not_found_error: - raise FileNotFoundError( - f"Could not find NumpyModel with {object_id} in {output_directory}." 
- f"Tried from following classes:\n{', '.join(model.__name__ for model in models)}" - ) - return None diff --git a/pydantic_numpy/model/__init__.py b/pydantic_numpy/model/__init__.py new file mode 100644 index 0000000..0753af0 --- /dev/null +++ b/pydantic_numpy/model/__init__.py @@ -0,0 +1,2 @@ +from pydantic_numpy.model.multi_array import MultiArrayNumpyFile +from pydantic_numpy.model.np_model import NumpyModel diff --git a/pydantic_numpy/model/multi_array.py b/pydantic_numpy/model/multi_array.py new file mode 100644 index 0000000..cb6bcf3 --- /dev/null +++ b/pydantic_numpy/model/multi_array.py @@ -0,0 +1,48 @@ +from functools import lru_cache + +import numpy as np +import numpy.typing as npt +from pydantic import FilePath +from pydantic.dataclasses import dataclass + + +@dataclass(frozen=True) +class MultiArrayNumpyFile: + path: FilePath + key: str + cached_load: bool = False + + def load(self) -> npt.NDArray: + """ + Load the NDArray stored in the given path within the given key + + Returns + ------- + NDArray + """ + loaded = _cached_np_array_load(self.path) if self.cached_load else np.load(self.path) + try: + return loaded[self.key] + except IndexError: + msg = f"The given path points to an uncompressed numpy file, which only has one array in it: {self.path}" + raise AttributeError(msg) + + +@lru_cache +def _cached_np_array_load(path: FilePath): + """ + Store the loaded numpy object within LRU cache in case we need it several times + + Parameters + ---------- + path: FilePath + Path to the numpy file + + Returns + ------- + Same as np.load + """ + return np.load(path) + + +__all__ = ["MultiArrayNumpyFile"] diff --git a/pydantic_numpy/model/np_model.py b/pydantic_numpy/model/np_model.py new file mode 100644 index 0000000..c74e158 --- /dev/null +++ b/pydantic_numpy/model/np_model.py @@ -0,0 +1,236 @@ +import pickle as pickle_pkg +from pathlib import Path +from typing import Any, Callable, ClassVar, Iterable, Optional + +import compress_pickle +import numpy as np 
+import numpy.typing as npt +from pydantic import BaseModel, DirectoryPath, FilePath, computed_field, validate_call +from ruamel.yaml import YAML + +from pydantic_numpy.util import np_general_all_close + +yaml = YAML() + + +class NumpyModel(BaseModel): + _dump_compression: ClassVar[str] = "lz4" + _dump_numpy_savez_file_name: ClassVar[str] = "arrays.npz" + _dump_non_array_file_stem: ClassVar[str] = "object_info" + + _directory_suffix: ClassVar[str] = ".pdnp" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, NumpyModel): + self_type = self.__pydantic_generic_metadata__["origin"] or self.__class__ + other_type = other.__pydantic_generic_metadata__["origin"] or other.__class__ + + self_ndarray_field_to_array, self_other_field_to_value = self._dump_numpy_split_dict() + other_ndarray_field_to_array, other_other_field_to_value = other._dump_numpy_split_dict() + + return ( + self_type == other_type + and self_other_field_to_value == other_other_field_to_value + and self.__pydantic_private__ == other.__pydantic_private__ + and self.__pydantic_extra__ == other.__pydantic_extra__ + and _compare_np_array_dicts(self_ndarray_field_to_array, other_ndarray_field_to_array) + ) + elif isinstance(other, BaseModel): + return super().__eq__(other) + else: + return NotImplemented # delegate to the other item in the comparison + + @classmethod + @validate_call + def model_directory_path(cls, output_directory: DirectoryPath, object_id: str) -> DirectoryPath: + return output_directory / f"{object_id}.{cls.__name__}{cls._directory_suffix}" + + @classmethod + @validate_call + def load( + cls, + output_directory: DirectoryPath, + object_id: str, + *, + pre_load_modifier: Optional[Callable[[dict[str, Any]], dict[str, Any]]] = None, + ): + """ + Load NumpyModel instance + + Parameters + ---------- + output_directory: DirectoryPath + The root directory where all model instances of interest are stored + object_id: String + The ID of the model instance + pre_load_modifier: 
Callable[[dict[str, Any]], dict[str, Any]] | None + Optional function that modifies the loaded arrays + + Returns + ------- + NumpyModel instance + """ + object_directory_path = cls.model_directory_path(output_directory, object_id) + + npz_file = np.load(object_directory_path / cls._dump_numpy_savez_file_name) + + other_path: FilePath + if (other_path := object_directory_path / cls._dump_compressed_pickle_file_name).exists(): + other_field_to_value = compress_pickle.load(other_path) + elif (other_path := object_directory_path / cls._dump_pickle_file_name).exists(): + with open(other_path, "rb") as in_pickle: + other_field_to_value = pickle_pkg.load(in_pickle) + elif (other_path := object_directory_path / cls._dump_non_array_yaml_name).exists(): + with open(other_path, "r") as in_yaml: + other_field_to_value = yaml.load(in_yaml) + else: + other_field_to_value = {} + + field_to_value = {**npz_file, **other_field_to_value} + if pre_load_modifier: + field_to_value = pre_load_modifier(field_to_value) + + return cls(**field_to_value) + + @validate_call + def dump( + self, output_directory: Path, object_id: str, *, compress: bool = True, pickle: bool = False + ) -> DirectoryPath: + assert "arbitrary_types_allowed" not in self.model_config or ( + self.model_config["arbitrary_types_allowed"] and pickle + ), "Arbitrary types are only supported in pickle mode" + + dump_directory_path = self.model_directory_path(output_directory, object_id) + dump_directory_path.mkdir(parents=True, exist_ok=True) + + ndarray_field_to_array, other_field_to_value = self._dump_numpy_split_dict() + + if ndarray_field_to_array: + (np.savez_compressed if compress else np.savez)( + dump_directory_path / self._dump_numpy_savez_file_name, **ndarray_field_to_array + ) + + if other_field_to_value: + if pickle: + if compress: + compress_pickle.dump( + other_field_to_value, + dump_directory_path / self._dump_compressed_pickle_file_name, + compression=self._dump_compression, + ) + else: + with 
open(dump_directory_path / self._dump_pickle_file_name, "wb") as out_pickle: + pickle_pkg.dump(other_field_to_value, out_pickle) + + else: + with open(dump_directory_path / self._dump_non_array_yaml_name, "w") as out_yaml: + yaml.dump(other_field_to_value, out_yaml) + + return dump_directory_path + + def _dump_numpy_split_dict(self) -> tuple[dict, dict]: + ndarray_field_to_array = {} + other_field_to_value = {} + + for k, v in self.model_dump(exclude_unset=True).items(): + if isinstance(v, np.ndarray): + ndarray_field_to_array[k] = v + else: + other_field_to_value[k] = v + + return ndarray_field_to_array, other_field_to_value + + @classmethod # type: ignore[misc] + @computed_field(return_type=str) + @property + def _dump_compressed_pickle_file_name(cls) -> str: + return f"{cls._dump_non_array_file_stem}.pickle.{cls._dump_compression}" + + @classmethod # type: ignore[misc] + @computed_field(return_type=str) + @property + def _dump_pickle_file_name(cls) -> str: + return f"{cls._dump_non_array_file_stem}.pickle" + + @classmethod # type: ignore[misc] + @computed_field(return_type=str) + @property + def _dump_non_array_yaml_name(cls) -> str: + return f"{cls._dump_non_array_file_stem}.yaml" + + +def model_agnostic_load( + output_directory: DirectoryPath, + object_id: str, + models: Iterable[type[NumpyModel]], + not_found_error: bool = False, + **load_kwargs, +) -> Optional[NumpyModel]: + """ + Provided an Iterable containing possible models, and the directory where they have been dumped. Load the first + instance of model that matches the provided object ID. 
+
+    Parameters
+    ----------
+    output_directory: DirectoryPath
+        The root directory where all model instances of interest are stored
+    object_id: String
+        The ID of the model instance
+    models: Iterable[type[NumpyModel]]
+        All NumpyModel instances of interest, note that they should have differing names
+    not_found_error: bool
+        If True, throw error when the respective model instance was not found
+    load_kwargs
+        Keyword arguments to pass to the load function
+
+    Returns
+    -------
+    NumpyModel instance if found
+    """
+    for model in models:
+        if model.model_directory_path(output_directory, object_id).exists():
+            return model.load(output_directory, object_id, **load_kwargs)
+
+    if not_found_error:
+        raise FileNotFoundError(
+            f"Could not find NumpyModel with {object_id} in {output_directory}."
+            f"Tried from following classes:\n{', '.join(model.__name__ for model in models)}"
+        )
+
+    return None
+
+
+def _compare_np_array_dicts(
+    dict_a: dict[str, npt.NDArray], dict_b: dict[str, npt.NDArray], rtol: float = 1e-05, atol: float = 1e-08
+) -> bool:
+    """
+    Compare two dictionaries containing numpy arrays as values.
+
+    Parameters:
+    dict_a, dict_b: dictionaries to compare. They should have same keys.
+    rtol, atol: relative and absolute tolerances for np.isclose()
+
+    Returns:
+    True if all corresponding arrays are element-wise close within the given tolerances, else False.
+ """ + + keys1 = frozenset(dict_a.keys()) + keys2 = frozenset(dict_b.keys()) + + if keys1 != keys2: + raise ValueError("Dictionaries have different keys") + + for key in keys1: + arr_a = dict_a[key] + arr_b = dict_b[key] + + if arr_a.shape != arr_b.shape: + raise ValueError(f"Arrays for key '{key}' have different shapes") + + if not np_general_all_close(arr_a, arr_b, rtol, atol): + return False + + return True + + +__all__ = ["NumpyModel", "model_agnostic_load"] diff --git a/pydantic_numpy/ndarray.py b/pydantic_numpy/ndarray.py deleted file mode 100644 index fa6bbe0..0000000 --- a/pydantic_numpy/ndarray.py +++ /dev/null @@ -1,95 +0,0 @@ -from abc import ABC, abstractmethod -from pathlib import Path -from typing import TYPE_CHECKING, Any, Generic, Mapping, Optional, TypeVar - -import numpy as np -from pydantic import BaseModel, FilePath, validator -from pydantic.fields import ModelField - -if TYPE_CHECKING: - from pydantic.typing import CallableGenerator - - -class NPFileDesc(BaseModel): - path: FilePath = ... - key: Optional[str] - - @validator("path", allow_reuse=True) - def check_absolute(cls, value: Path) -> Path: - return value.resolve().absolute() - - -T = TypeVar("T") -ScalarType = TypeVar("ScalarType", bound=np.generic, covariant=True) - - -class BaseNDArrayType(Generic[T, ScalarType], np.ndarray[T, np.dtype[ScalarType]], ABC): - @classmethod - def __get_validators__(cls) -> "CallableGenerator": - yield cls.validate - - @classmethod - def __modify_schema__(cls, field_schema: dict[str, Any], field: Optional[ModelField]) -> None: - if field and field.sub_fields: - type_with_potential_subtype = f"np.ndarray[{field.sub_fields[0]}]" - else: - type_with_potential_subtype = "np.ndarray" - field_schema.update({"type": type_with_potential_subtype}) - - @classmethod - @abstractmethod - def validate(cls, val: Any, field: ModelField) -> np.ndarray[T, np.dtype[ScalarType]]: - ... 
- - @staticmethod - def field_validation(val: Any, field: ModelField) -> np.ndarray[T, np.dtype[ScalarType]]: - if isinstance(val, Mapping): - val = NPFileDesc(**val) - - if isinstance(val, NPFileDesc): - val: NPFileDesc - - if val.path.suffix.lower() not in [".npz", ".npy"]: - raise ValueError("Expected npz or npy file.") - - if not val.path.is_file(): - raise ValueError(f"Path does not exist {val.path}") - - try: - content = np.load(str(val.path)) - except FileNotFoundError: - raise ValueError(f"Failed to load numpy data from file {val.path}") - - if val.path.suffix.lower() == ".npz": - key = val.key or content.files[0] - try: - data = content[key] - except KeyError: - raise ValueError(f"Key {key} not found in npz.") - else: - data = content - else: - data = val - - return np.asarray(data, dtype=field.sub_fields[1].type_) if field.sub_fields else np.asarray(data) - - -class PydanticNDArray(Generic[T, ScalarType], BaseNDArrayType[T, ScalarType]): - @classmethod - def validate(cls, val: Any, field: ModelField) -> np.ndarray[T, np.dtype[ScalarType]]: - return cls.field_validation(val, field) - - -class PydanticPotentialNDArray(Generic[T, ScalarType], BaseNDArrayType[T, ScalarType]): - """Like NDArray, but validation errors result in None.""" - - @classmethod - def validate(cls, val: Any, field: ModelField) -> Optional[np.ndarray[T, np.dtype[ScalarType]]]: - try: - return cls.field_validation(val, field) - except ValueError: - return None - - -NDArray = PydanticNDArray -PotentialNDArray = PydanticPotentialNDArray diff --git a/pydantic_numpy/typing/__init__.py b/pydantic_numpy/typing/__init__.py new file mode 100644 index 0000000..19d9d34 --- /dev/null +++ b/pydantic_numpy/typing/__init__.py @@ -0,0 +1,8 @@ +from pydantic_numpy.typing.i_dimensional import * +from pydantic_numpy.typing.ii_dimensional import * +from pydantic_numpy.typing.iii_dimensional import * +from pydantic_numpy.typing.n_dimensional import * +from pydantic_numpy.typing.strict_data_type.i_dimensional 
import * +from pydantic_numpy.typing.strict_data_type.ii_dimensional import * +from pydantic_numpy.typing.strict_data_type.iii_dimensional import * +from pydantic_numpy.typing.strict_data_type.n_dimensional import * diff --git a/pydantic_numpy/typing/i_dimensional.py b/pydantic_numpy/typing/i_dimensional.py new file mode 100644 index 0000000..fb724bd --- /dev/null +++ b/pydantic_numpy/typing/i_dimensional.py @@ -0,0 +1,54 @@ +import numpy as np + +from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing + +Np1DArray = np_array_pydantic_annotated_typing(data_type=None, dimensions=1, strict_data_typing=False) + +Np1DArrayInt64 = np_array_pydantic_annotated_typing(data_type=np.int64, dimensions=1) +Np1DArrayInt32 = np_array_pydantic_annotated_typing(data_type=np.int32, dimensions=1) +Np1DArrayInt16 = np_array_pydantic_annotated_typing(data_type=np.int16, dimensions=1) +Np1DArrayInt8 = np_array_pydantic_annotated_typing(data_type=np.int8, dimensions=1) + +Np1DArrayUint64 = np_array_pydantic_annotated_typing(data_type=np.uint64, dimensions=1) +Np1DArrayUint32 = np_array_pydantic_annotated_typing(data_type=np.uint32, dimensions=1) +Np1DArrayUint16 = np_array_pydantic_annotated_typing(data_type=np.uint16, dimensions=1) +Np1DArrayUint8 = np_array_pydantic_annotated_typing(data_type=np.uint8, dimensions=1) + +Np1DArrayFp128 = np_array_pydantic_annotated_typing(data_type=np.float128, dimensions=1) +Np1DArrayFp64 = np_array_pydantic_annotated_typing(data_type=np.float64, dimensions=1) +Np1DArrayFp32 = np_array_pydantic_annotated_typing(data_type=np.float32, dimensions=1) +Np1DArrayFp16 = np_array_pydantic_annotated_typing(data_type=np.float16, dimensions=1) + +Np1DArrayComplex256 = np_array_pydantic_annotated_typing(data_type=np.complex256, dimensions=1) +Np1DArrayComplex128 = np_array_pydantic_annotated_typing(data_type=np.complex128, dimensions=1) +Np1DArrayComplex64 = np_array_pydantic_annotated_typing(data_type=np.complex64, dimensions=1) + 
+Np1DArrayBool = np_array_pydantic_annotated_typing(data_type=bool, dimensions=1) + + +# Non-number types +Np1DArrayDatetime64 = np_array_pydantic_annotated_typing(data_type=np.datetime64, dimensions=1) +Np1DArrayTimedelta64 = np_array_pydantic_annotated_typing(data_type=np.timedelta64, dimensions=1) + + +__all__ = [ + "Np1DArray", + "Np1DArrayInt64", + "Np1DArrayInt32", + "Np1DArrayInt16", + "Np1DArrayInt8", + "Np1DArrayUint64", + "Np1DArrayUint32", + "Np1DArrayUint16", + "Np1DArrayUint8", + "Np1DArrayFp128", + "Np1DArrayFp64", + "Np1DArrayFp32", + "Np1DArrayFp16", + "Np1DArrayComplex256", + "Np1DArrayComplex128", + "Np1DArrayComplex64", + "Np1DArrayBool", + "Np1DArrayDatetime64", + "Np1DArrayTimedelta64", +] diff --git a/pydantic_numpy/typing/ii_dimensional.py b/pydantic_numpy/typing/ii_dimensional.py new file mode 100644 index 0000000..03f6138 --- /dev/null +++ b/pydantic_numpy/typing/ii_dimensional.py @@ -0,0 +1,54 @@ +import numpy as np + +from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing + +Np2DArray = np_array_pydantic_annotated_typing(data_type=None, dimensions=2, strict_data_typing=False) + +Np2DArrayInt64 = np_array_pydantic_annotated_typing(data_type=np.int64, dimensions=2) +Np2DArrayInt32 = np_array_pydantic_annotated_typing(data_type=np.int32, dimensions=2) +Np2DArrayInt16 = np_array_pydantic_annotated_typing(data_type=np.int16, dimensions=2) +Np2DArrayInt8 = np_array_pydantic_annotated_typing(data_type=np.int8, dimensions=2) + +Np2DArrayUint64 = np_array_pydantic_annotated_typing(data_type=np.uint64, dimensions=2) +Np2DArrayUint32 = np_array_pydantic_annotated_typing(data_type=np.uint32, dimensions=2) +Np2DArrayUint16 = np_array_pydantic_annotated_typing(data_type=np.uint16, dimensions=2) +Np2DArrayUint8 = np_array_pydantic_annotated_typing(data_type=np.uint8, dimensions=2) + +Np2DArrayFp128 = np_array_pydantic_annotated_typing(data_type=np.float128, dimensions=2) +Np2DArrayFp64 = 
np_array_pydantic_annotated_typing(data_type=np.float64, dimensions=2) +Np2DArrayFp32 = np_array_pydantic_annotated_typing(data_type=np.float32, dimensions=2) +Np2DArrayFp16 = np_array_pydantic_annotated_typing(data_type=np.float16, dimensions=2) + +Np2DArrayComplex256 = np_array_pydantic_annotated_typing(data_type=np.complex256, dimensions=2) +Np2DArrayComplex128 = np_array_pydantic_annotated_typing(data_type=np.complex128, dimensions=2) +Np2DArrayComplex64 = np_array_pydantic_annotated_typing(data_type=np.complex64, dimensions=2) + +Np2DArrayBool = np_array_pydantic_annotated_typing(data_type=bool, dimensions=2) + + +# Non-number types +Np2DArrayDatetime64 = np_array_pydantic_annotated_typing(data_type=np.datetime64, dimensions=2) +Np2DArrayTimedelta64 = np_array_pydantic_annotated_typing(data_type=np.timedelta64, dimensions=2) + + +__all__ = [ + "Np2DArray", + "Np2DArrayInt64", + "Np2DArrayInt32", + "Np2DArrayInt16", + "Np2DArrayInt8", + "Np2DArrayUint64", + "Np2DArrayUint32", + "Np2DArrayUint16", + "Np2DArrayUint8", + "Np2DArrayFp128", + "Np2DArrayFp64", + "Np2DArrayFp32", + "Np2DArrayFp16", + "Np2DArrayComplex256", + "Np2DArrayComplex128", + "Np2DArrayComplex64", + "Np2DArrayBool", + "Np2DArrayDatetime64", + "Np2DArrayTimedelta64", +] diff --git a/pydantic_numpy/typing/iii_dimensional.py b/pydantic_numpy/typing/iii_dimensional.py new file mode 100644 index 0000000..0241141 --- /dev/null +++ b/pydantic_numpy/typing/iii_dimensional.py @@ -0,0 +1,53 @@ +import numpy as np + +from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing + +Np3DArray = np_array_pydantic_annotated_typing(data_type=None, dimensions=3, strict_data_typing=False) + +Np3DArrayInt64 = np_array_pydantic_annotated_typing(data_type=np.int64, dimensions=3) +Np3DArrayInt32 = np_array_pydantic_annotated_typing(data_type=np.int32, dimensions=3) +Np3DArrayInt16 = np_array_pydantic_annotated_typing(data_type=np.int16, dimensions=3) +Np3DArrayInt8 = 
np_array_pydantic_annotated_typing(data_type=np.int8, dimensions=3) + +Np3DArrayUint64 = np_array_pydantic_annotated_typing(data_type=np.uint64, dimensions=3) +Np3DArrayUint32 = np_array_pydantic_annotated_typing(data_type=np.uint32, dimensions=3) +Np3DArrayUint16 = np_array_pydantic_annotated_typing(data_type=np.uint16, dimensions=3) +Np3DArrayUint8 = np_array_pydantic_annotated_typing(data_type=np.uint8, dimensions=3) + +Np3DArrayFp128 = np_array_pydantic_annotated_typing(data_type=np.float128, dimensions=3) +Np3DArrayFp64 = np_array_pydantic_annotated_typing(data_type=np.float64, dimensions=3) +Np3DArrayFp32 = np_array_pydantic_annotated_typing(data_type=np.float32, dimensions=3) +Np3DArrayFp16 = np_array_pydantic_annotated_typing(data_type=np.float16, dimensions=3) + +Np3DArrayComplex256 = np_array_pydantic_annotated_typing(data_type=np.complex256, dimensions=3) +Np3DArrayComplex128 = np_array_pydantic_annotated_typing(data_type=np.complex128, dimensions=3) +Np3DArrayComplex64 = np_array_pydantic_annotated_typing(data_type=np.complex64, dimensions=3) + +Np3DArrayBool = np_array_pydantic_annotated_typing(data_type=bool, dimensions=3) + + +# Non-number types +Np3DArrayDatetime64 = np_array_pydantic_annotated_typing(data_type=np.datetime64, dimensions=3) +Np3DArrayTimedelta64 = np_array_pydantic_annotated_typing(data_type=np.timedelta64, dimensions=3) + +__all__ = [ + "Np3DArray", + "Np3DArrayInt64", + "Np3DArrayInt32", + "Np3DArrayInt16", + "Np3DArrayInt8", + "Np3DArrayUint64", + "Np3DArrayUint32", + "Np3DArrayUint16", + "Np3DArrayUint8", + "Np3DArrayFp128", + "Np3DArrayFp64", + "Np3DArrayFp32", + "Np3DArrayFp16", + "Np3DArrayComplex256", + "Np3DArrayComplex128", + "Np3DArrayComplex64", + "Np3DArrayBool", + "Np3DArrayDatetime64", + "Np3DArrayTimedelta64", +] diff --git a/pydantic_numpy/typing/n_dimensional.py b/pydantic_numpy/typing/n_dimensional.py new file mode 100644 index 0000000..23a2fc0 --- /dev/null +++ b/pydantic_numpy/typing/n_dimensional.py @@ -0,0 
"""Pydantic-ready typings for N-dimensional NumPy arrays (no rank constraint)."""
import numpy as np

from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing

# Catch-all typing: any dtype, any number of dimensions, no strict dtype check.
NpNDArray = np_array_pydantic_annotated_typing(data_type=None, dimensions=None, strict_data_typing=False)

# Signed integers
NpNDArrayInt64 = np_array_pydantic_annotated_typing(data_type=np.int64)
NpNDArrayInt32 = np_array_pydantic_annotated_typing(data_type=np.int32)
NpNDArrayInt16 = np_array_pydantic_annotated_typing(data_type=np.int16)
NpNDArrayInt8 = np_array_pydantic_annotated_typing(data_type=np.int8)

# Unsigned integers
NpNDArrayUint64 = np_array_pydantic_annotated_typing(data_type=np.uint64)
NpNDArrayUint32 = np_array_pydantic_annotated_typing(data_type=np.uint32)
NpNDArrayUint16 = np_array_pydantic_annotated_typing(data_type=np.uint16)
NpNDArrayUint8 = np_array_pydantic_annotated_typing(data_type=np.uint8)

# Floating point.
# NOTE(review): np.float128 (and np.complex256 below) are unavailable on some
# platforms (e.g. Windows) — the old test suite guarded them with
# AttributeError handling, but these module-level aliases do not; importing
# this module there would fail. Confirm whether that is intended.
NpNDArrayFp128 = np_array_pydantic_annotated_typing(data_type=np.float128)
NpNDArrayFp64 = np_array_pydantic_annotated_typing(data_type=np.float64)
NpNDArrayFp32 = np_array_pydantic_annotated_typing(data_type=np.float32)
NpNDArrayFp16 = np_array_pydantic_annotated_typing(data_type=np.float16)

# Complex
NpNDArrayComplex256 = np_array_pydantic_annotated_typing(data_type=np.complex256)
NpNDArrayComplex128 = np_array_pydantic_annotated_typing(data_type=np.complex128)
NpNDArrayComplex64 = np_array_pydantic_annotated_typing(data_type=np.complex64)

NpNDArrayBool = np_array_pydantic_annotated_typing(data_type=bool)


# Non-number types
NpNDArrayDatetime64 = np_array_pydantic_annotated_typing(data_type=np.datetime64)
NpNDArrayTimedelta64 = np_array_pydantic_annotated_typing(data_type=np.timedelta64)


__all__ = [
    "NpNDArray",
    "NpNDArrayInt64",
    "NpNDArrayInt32",
    "NpNDArrayInt16",
    "NpNDArrayInt8",
    "NpNDArrayUint64",
    "NpNDArrayUint32",
    "NpNDArrayUint16",
    "NpNDArrayUint8",
    "NpNDArrayFp128",
    "NpNDArrayFp64",
    "NpNDArrayFp32",
    "NpNDArrayFp16",
    "NpNDArrayComplex256",
    "NpNDArrayComplex128",
    "NpNDArrayComplex64",
    "NpNDArrayBool",
    "NpNDArrayDatetime64",
    "NpNDArrayTimedelta64",
]
strict_data_typing=True) + +NpStrict1DArrayComplex256 = np_array_pydantic_annotated_typing( + data_type=np.complex256, dimensions=1, strict_data_typing=True +) +NpStrict1DArrayComplex128 = np_array_pydantic_annotated_typing( + data_type=np.complex128, dimensions=1, strict_data_typing=True +) +NpStrict1DArrayComplex64 = np_array_pydantic_annotated_typing( + data_type=np.complex64, dimensions=1, strict_data_typing=True +) + +NpStrict1DArrayBool = np_array_pydantic_annotated_typing(data_type=bool, dimensions=1, strict_data_typing=True) + + +# Non-number types +NpStrict1DArrayDatetime64 = np_array_pydantic_annotated_typing( + data_type=np.datetime64, dimensions=1, strict_data_typing=True +) +NpStrict1DArrayTimedelta64 = np_array_pydantic_annotated_typing( + data_type=np.timedelta64, dimensions=1, strict_data_typing=True +) + + +__all__ = [ + "NpStrict1DArrayInt64", + "NpStrict1DArrayInt32", + "NpStrict1DArrayInt16", + "NpStrict1DArrayInt8", + "NpStrict1DArrayUint64", + "NpStrict1DArrayUint32", + "NpStrict1DArrayUint16", + "NpStrict1DArrayUint8", + "NpStrict1DArrayFp128", + "NpStrict1DArrayFp64", + "NpStrict1DArrayFp32", + "NpStrict1DArrayFp16", + "NpStrict1DArrayComplex256", + "NpStrict1DArrayComplex128", + "NpStrict1DArrayComplex64", + "NpStrict1DArrayBool", + "NpStrict1DArrayDatetime64", + "NpStrict1DArrayTimedelta64", +] diff --git a/pydantic_numpy/typing/strict_data_type/ii_dimensional.py b/pydantic_numpy/typing/strict_data_type/ii_dimensional.py new file mode 100644 index 0000000..62d8fba --- /dev/null +++ b/pydantic_numpy/typing/strict_data_type/ii_dimensional.py @@ -0,0 +1,61 @@ +import numpy as np + +from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing + +NpStrict2DArrayInt64 = np_array_pydantic_annotated_typing(data_type=np.int64, dimensions=2, strict_data_typing=True) +NpStrict2DArrayInt32 = np_array_pydantic_annotated_typing(data_type=np.int32, dimensions=2, strict_data_typing=True) +NpStrict2DArrayInt16 = 
np_array_pydantic_annotated_typing(data_type=np.int16, dimensions=2, strict_data_typing=True) +NpStrict2DArrayInt8 = np_array_pydantic_annotated_typing(data_type=np.int8, dimensions=2, strict_data_typing=True) + +NpStrict2DArrayUint64 = np_array_pydantic_annotated_typing(data_type=np.uint64, dimensions=2, strict_data_typing=True) +NpStrict2DArrayUint32 = np_array_pydantic_annotated_typing(data_type=np.uint32, dimensions=2, strict_data_typing=True) +NpStrict2DArrayUint16 = np_array_pydantic_annotated_typing(data_type=np.uint16, dimensions=2, strict_data_typing=True) +NpStrict2DArrayUint8 = np_array_pydantic_annotated_typing(data_type=np.uint8, dimensions=2, strict_data_typing=True) + +NpStrict2DArrayFp128 = np_array_pydantic_annotated_typing(data_type=np.float128, dimensions=2, strict_data_typing=True) +NpStrict2DArrayFp64 = np_array_pydantic_annotated_typing(data_type=np.float64, dimensions=2, strict_data_typing=True) +NpStrict2DArrayFp32 = np_array_pydantic_annotated_typing(data_type=np.float32, dimensions=2, strict_data_typing=True) +NpStrict2DArrayFp16 = np_array_pydantic_annotated_typing(data_type=np.float16, dimensions=2, strict_data_typing=True) + +NpStrict2DArrayComplex256 = np_array_pydantic_annotated_typing( + data_type=np.complex256, dimensions=2, strict_data_typing=True +) +NpStrict2DArrayComplex128 = np_array_pydantic_annotated_typing( + data_type=np.complex128, dimensions=2, strict_data_typing=True +) +NpStrict2DArrayComplex64 = np_array_pydantic_annotated_typing( + data_type=np.complex64, dimensions=2, strict_data_typing=True +) + +NpStrict2DArrayBool = np_array_pydantic_annotated_typing(data_type=bool, dimensions=2, strict_data_typing=True) + + +# Non-number types +NpStrict2DArrayDatetime64 = np_array_pydantic_annotated_typing( + data_type=np.datetime64, dimensions=2, strict_data_typing=True +) +NpStrict2DArrayTimedelta64 = np_array_pydantic_annotated_typing( + data_type=np.timedelta64, dimensions=2, strict_data_typing=True +) + + +__all__ = [ + 
"NpStrict2DArrayInt64", + "NpStrict2DArrayInt32", + "NpStrict2DArrayInt16", + "NpStrict2DArrayInt8", + "NpStrict2DArrayUint64", + "NpStrict2DArrayUint32", + "NpStrict2DArrayUint16", + "NpStrict2DArrayUint8", + "NpStrict2DArrayFp128", + "NpStrict2DArrayFp64", + "NpStrict2DArrayFp32", + "NpStrict2DArrayFp16", + "NpStrict2DArrayComplex256", + "NpStrict2DArrayComplex128", + "NpStrict2DArrayComplex64", + "NpStrict2DArrayBool", + "NpStrict2DArrayDatetime64", + "NpStrict2DArrayTimedelta64", +] diff --git a/pydantic_numpy/typing/strict_data_type/iii_dimensional.py b/pydantic_numpy/typing/strict_data_type/iii_dimensional.py new file mode 100644 index 0000000..e5fbac4 --- /dev/null +++ b/pydantic_numpy/typing/strict_data_type/iii_dimensional.py @@ -0,0 +1,60 @@ +import numpy as np + +from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing + +NpStrict3DArrayInt64 = np_array_pydantic_annotated_typing(data_type=np.int64, dimensions=3, strict_data_typing=True) +NpStrict3DArrayInt32 = np_array_pydantic_annotated_typing(data_type=np.int32, dimensions=3, strict_data_typing=True) +NpStrict3DArrayInt16 = np_array_pydantic_annotated_typing(data_type=np.int16, dimensions=3, strict_data_typing=True) +NpStrict3DArrayInt8 = np_array_pydantic_annotated_typing(data_type=np.int8, dimensions=3, strict_data_typing=True) + +NpStrict3DArrayUint64 = np_array_pydantic_annotated_typing(data_type=np.uint64, dimensions=3, strict_data_typing=True) +NpStrict3DArrayUint32 = np_array_pydantic_annotated_typing(data_type=np.uint32, dimensions=3, strict_data_typing=True) +NpStrict3DArrayUint16 = np_array_pydantic_annotated_typing(data_type=np.uint16, dimensions=3, strict_data_typing=True) +NpStrict3DArrayUint8 = np_array_pydantic_annotated_typing(data_type=np.uint8, dimensions=3, strict_data_typing=True) + +NpStrict3DArrayFp128 = np_array_pydantic_annotated_typing(data_type=np.float128, dimensions=3, strict_data_typing=True) +NpStrict3DArrayFp64 = 
np_array_pydantic_annotated_typing(data_type=np.float64, dimensions=3, strict_data_typing=True) +NpStrict3DArrayFp32 = np_array_pydantic_annotated_typing(data_type=np.float32, dimensions=3, strict_data_typing=True) +NpStrict3DArrayFp16 = np_array_pydantic_annotated_typing(data_type=np.float16, dimensions=3, strict_data_typing=True) + +NpStrict3DArrayComplex256 = np_array_pydantic_annotated_typing( + data_type=np.complex256, dimensions=3, strict_data_typing=True +) +NpStrict3DArrayComplex128 = np_array_pydantic_annotated_typing( + data_type=np.complex128, dimensions=3, strict_data_typing=True +) +NpStrict3DArrayComplex64 = np_array_pydantic_annotated_typing( + data_type=np.complex64, dimensions=3, strict_data_typing=True +) + +NpStrict3DArrayBool = np_array_pydantic_annotated_typing(data_type=bool, dimensions=3, strict_data_typing=True) + + +# Non-number types +NpStrict3DArrayDatetime64 = np_array_pydantic_annotated_typing( + data_type=np.datetime64, dimensions=3, strict_data_typing=True +) +NpStrict3DArrayTimedelta64 = np_array_pydantic_annotated_typing( + data_type=np.timedelta64, dimensions=3, strict_data_typing=True +) + +__all__ = [ + "NpStrict3DArrayInt64", + "NpStrict3DArrayInt32", + "NpStrict3DArrayInt16", + "NpStrict3DArrayInt8", + "NpStrict3DArrayUint64", + "NpStrict3DArrayUint32", + "NpStrict3DArrayUint16", + "NpStrict3DArrayUint8", + "NpStrict3DArrayFp128", + "NpStrict3DArrayFp64", + "NpStrict3DArrayFp32", + "NpStrict3DArrayFp16", + "NpStrict3DArrayComplex256", + "NpStrict3DArrayComplex128", + "NpStrict3DArrayComplex64", + "NpStrict3DArrayBool", + "NpStrict3DArrayDatetime64", + "NpStrict3DArrayTimedelta64", +] diff --git a/pydantic_numpy/typing/strict_data_type/n_dimensional.py b/pydantic_numpy/typing/strict_data_type/n_dimensional.py new file mode 100644 index 0000000..445abfe --- /dev/null +++ b/pydantic_numpy/typing/strict_data_type/n_dimensional.py @@ -0,0 +1,51 @@ +import numpy as np + +from pydantic_numpy.helper.annotation import 
np_array_pydantic_annotated_typing + +NpStrictNDArrayInt64 = np_array_pydantic_annotated_typing(data_type=np.int64, strict_data_typing=True) +NpStrictNDArrayInt32 = np_array_pydantic_annotated_typing(data_type=np.int32, strict_data_typing=True) +NpStrictNDArrayInt16 = np_array_pydantic_annotated_typing(data_type=np.int16, strict_data_typing=True) +NpStrictNDArrayInt8 = np_array_pydantic_annotated_typing(data_type=np.int8, strict_data_typing=True) + +NpStrictNDArrayUint64 = np_array_pydantic_annotated_typing(data_type=np.uint64, strict_data_typing=True) +NpStrictNDArrayUint32 = np_array_pydantic_annotated_typing(data_type=np.uint32, strict_data_typing=True) +NpStrictNDArrayUint16 = np_array_pydantic_annotated_typing(data_type=np.uint16, strict_data_typing=True) +NpStrictNDArrayUint8 = np_array_pydantic_annotated_typing(data_type=np.uint8, strict_data_typing=True) + +NpStrictNDArrayFp128 = np_array_pydantic_annotated_typing(data_type=np.float128, strict_data_typing=True) +NpStrictNDArrayFp64 = np_array_pydantic_annotated_typing(data_type=np.float64, strict_data_typing=True) +NpStrictNDArrayFp32 = np_array_pydantic_annotated_typing(data_type=np.float32, strict_data_typing=True) +NpStrictNDArrayFp16 = np_array_pydantic_annotated_typing(data_type=np.float16, strict_data_typing=True) + +NpStrictNDArrayComplex256 = np_array_pydantic_annotated_typing(data_type=np.complex256, strict_data_typing=True) +NpStrictNDArrayComplex128 = np_array_pydantic_annotated_typing(data_type=np.complex128, strict_data_typing=True) +NpStrictNDArrayComplex64 = np_array_pydantic_annotated_typing(data_type=np.complex64, strict_data_typing=True) + +NpStrictNDArrayBool = np_array_pydantic_annotated_typing(data_type=bool, strict_data_typing=True) + + +# Non-number types +NpStrictNDArrayDatetime64 = np_array_pydantic_annotated_typing(data_type=np.datetime64, strict_data_typing=True) +NpStrictNDArrayTimedelta64 = np_array_pydantic_annotated_typing(data_type=np.timedelta64, strict_data_typing=True) + + 
+__all__ = [ + "NpStrictNDArrayInt64", + "NpStrictNDArrayInt32", + "NpStrictNDArrayInt16", + "NpStrictNDArrayInt8", + "NpStrictNDArrayUint64", + "NpStrictNDArrayUint32", + "NpStrictNDArrayUint16", + "NpStrictNDArrayUint8", + "NpStrictNDArrayFp128", + "NpStrictNDArrayFp64", + "NpStrictNDArrayFp32", + "NpStrictNDArrayFp16", + "NpStrictNDArrayComplex256", + "NpStrictNDArrayComplex128", + "NpStrictNDArrayComplex64", + "NpStrictNDArrayBool", + "NpStrictNDArrayDatetime64", + "NpStrictNDArrayTimedelta64", +] diff --git a/pydantic_numpy/util.py b/pydantic_numpy/util.py new file mode 100644 index 0000000..46de7b3 --- /dev/null +++ b/pydantic_numpy/util.py @@ -0,0 +1,29 @@ +import numpy as np +import numpy.typing as npt +from numpy.core._exceptions import UFuncTypeError +from numpy.exceptions import DTypePromotionError + + +def np_general_all_close(arr_a: npt.NDArray, arr_b: npt.NDArray, rtol: float = 1e-05, atol: float = 1e-08) -> bool: + """ + Data type agnostic function to define if two numpy array have elements that are close + + Parameters + ---------- + arr_a: npt.NDArray + arr_b: npt.NDArray + rtol: float + See np.allclose + atol: float + See np.allclose + + Returns + ------- + Bool + """ + try: + return np.allclose(arr_a, arr_b, rtol=rtol, atol=atol, equal_nan=True) + except UFuncTypeError: + return np.allclose(arr_a.astype(np.float64), arr_b.astype(np.float64), rtol=rtol, atol=atol, equal_nan=True) + except DTypePromotionError: + return bool(np.all(arr_a == arr_b)) diff --git a/pyproject.toml b/pyproject.toml index dfc8ee5..bac15a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] name = "pydantic_numpy" -version = "2.2.2" -description = "Seamlessly integrate numpy arrays into pydantic models" -authors = ["Can H. Tartanoglu ", "Christoph Heindl"] -license = "MIT" - +version = "3.1.0" +description = "Pydantic Model integration of the NumPy array" +authors = ["Can H. Tartanoglu", "Christoph Heindl"] +maintainers = ["Can H. 
Tartanoglu "] readme = "README.md" -repository = "https://github.com/caniko/pydantic-numpy" +homepage = "https://github.com/caniko/pydantic-numpy" +license = "BSD-4" keywords = ["pydantic", "numpy", "typing"] classifiers = [ @@ -23,13 +23,17 @@ compress-pickle = { version = "*", extras = ["lz4"] } ruamel-yaml = "^0.17.21" numpy = "*" -pydantic = "^1.10.0" +pydantic = "^2.0" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.4.0" +parameterized = "^0.9.0" +hypothesis = "^6.82.0" +setuptools = "^68.0.0" + -[tool.poetry.dev-dependencies] -pytest = "*" -black = "*" -isort = "*" -pre-commit = "*" +[tool.poetry.group.ci.dependencies] +mypy = "^1.4.1" [tool.black] line-length = 120 @@ -40,6 +44,7 @@ profile = "black" [tool.ruff] line-length = 120 +ignore-init-module-imports = true [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/helper/__init__.py b/tests/helper/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/helper/cache.py b/tests/helper/cache.py new file mode 100644 index 0000000..abb533a --- /dev/null +++ b/tests/helper/cache.py @@ -0,0 +1,11 @@ +from functools import cache + +from pydantic import BaseModel + + +@cache +def cached_calculation(array_type_hint) -> type[BaseModel]: + class ModelForTesting(BaseModel): + array_field: array_type_hint + + return ModelForTesting diff --git a/tests/helper/groups.py b/tests/helper/groups.py new file mode 100644 index 0000000..2d023ab --- /dev/null +++ b/tests/helper/groups.py @@ -0,0 +1,206 @@ +import numpy as np + +from pydantic_numpy.typing import * + +supported_data_types = ( + np.int64, + np.int32, + np.int16, + np.int8, + np.uint64, + np.uint32, + np.uint16, + np.uint8, + np.float128, + np.float64, + np.float32, + np.float16, + np.complex256, + np.complex128, + np.complex64, + np.datetime64, + np.timedelta64, +) + +data_type_1d_array_typing_dimensions = [ + (np.int64, Np1DArrayInt64, 1), + (np.int32, Np1DArrayInt32, 1), + (np.int16, Np1DArrayInt16, 1), + (np.int8, 
Np1DArrayInt8, 1), + (np.uint64, Np1DArrayUint64, 1), + (np.uint32, Np1DArrayUint32, 1), + (np.uint16, Np1DArrayUint16, 1), + (np.uint8, Np1DArrayUint8, 1), + (np.float128, Np1DArrayFp128, 1), + (np.float64, Np1DArrayFp64, 1), + (np.float32, Np1DArrayFp32, 1), + (np.float16, Np1DArrayFp16, 1), + (np.complex256, Np1DArrayComplex256, 1), + (np.complex128, Np1DArrayComplex128, 1), + (np.complex64, Np1DArrayComplex64, 1), + (bool, Np1DArrayBool, 1), + (np.datetime64, Np1DArrayDatetime64, 1), + (np.timedelta64, Np1DArrayTimedelta64, 1), +] +data_type_2d_array_typing_dimensions = [ + (np.int64, Np2DArrayInt64, 2), + (np.int32, Np2DArrayInt32, 2), + (np.int16, Np2DArrayInt16, 2), + (np.int8, Np2DArrayInt8, 2), + (np.uint64, Np2DArrayUint64, 2), + (np.uint32, Np2DArrayUint32, 2), + (np.uint16, Np2DArrayUint16, 2), + (np.uint8, Np2DArrayUint8, 2), + (np.float128, Np2DArrayFp128, 2), + (np.float64, Np2DArrayFp64, 2), + (np.float32, Np2DArrayFp32, 2), + (np.float16, Np2DArrayFp16, 2), + (np.complex256, Np2DArrayComplex256, 2), + (np.complex128, Np2DArrayComplex128, 2), + (np.complex64, Np2DArrayComplex64, 2), + (bool, Np2DArrayBool, 2), + (np.datetime64, Np2DArrayDatetime64, 2), + (np.timedelta64, Np2DArrayTimedelta64, 2), +] +data_type_3d_array_typing_dimensions = [ + (np.int64, Np3DArrayInt64, 3), + (np.int32, Np3DArrayInt32, 3), + (np.int16, Np3DArrayInt16, 3), + (np.int8, Np3DArrayInt8, 3), + (np.uint64, Np3DArrayUint64, 3), + (np.uint32, Np3DArrayUint32, 3), + (np.uint16, Np3DArrayUint16, 3), + (np.uint8, Np3DArrayUint8, 3), + (np.float128, Np3DArrayFp128, 3), + (np.float64, Np3DArrayFp64, 3), + (np.float32, Np3DArrayFp32, 3), + (np.float16, Np3DArrayFp16, 3), + (np.complex256, Np3DArrayComplex256, 3), + (np.complex128, Np3DArrayComplex128, 3), + (np.complex64, Np3DArrayComplex64, 3), + (bool, Np3DArrayBool, 3), + (np.datetime64, Np3DArrayDatetime64, 3), + (np.timedelta64, Np3DArrayTimedelta64, 3), +] +data_type_nd_array_typing_dimensions = [ + (np.int64, NpNDArrayInt64, 
None), + (np.int32, NpNDArrayInt32, None), + (np.int16, NpNDArrayInt16, None), + (np.int8, NpNDArrayInt8, None), + (np.uint64, NpNDArrayUint64, None), + (np.uint32, NpNDArrayUint32, None), + (np.uint16, NpNDArrayUint16, None), + (np.uint8, NpNDArrayUint8, None), + (np.float128, NpNDArrayFp128, None), + (np.float64, NpNDArrayFp64, None), + (np.float32, NpNDArrayFp32, None), + (np.float16, NpNDArrayFp16, None), + (np.complex256, NpNDArrayComplex256, None), + (np.complex128, NpNDArrayComplex128, None), + (np.complex64, NpNDArrayComplex64, None), + (bool, NpNDArrayBool, None), + (np.datetime64, NpNDArrayDatetime64, None), + (np.timedelta64, NpNDArrayTimedelta64, None), +] + +data_type_array_typing_dimensions = [ + *data_type_1d_array_typing_dimensions, + *data_type_2d_array_typing_dimensions, + *data_type_3d_array_typing_dimensions, + *data_type_nd_array_typing_dimensions, +] + +# Data type strict +strict_data_type_1d_array_typing_dimensions = [ + (np.int64, NpStrict1DArrayInt64, 1), + (np.int32, NpStrict1DArrayInt32, 1), + (np.int16, NpStrict1DArrayInt16, 1), + (np.int8, NpStrict1DArrayInt8, 1), + (np.uint64, NpStrict1DArrayUint64, 1), + (np.uint32, NpStrict1DArrayUint32, 1), + (np.uint16, NpStrict1DArrayUint16, 1), + (np.uint8, NpStrict1DArrayUint8, 1), + (np.float128, NpStrict1DArrayFp128, 1), + (np.float64, NpStrict1DArrayFp64, 1), + (np.float32, NpStrict1DArrayFp32, 1), + (np.float16, NpStrict1DArrayFp16, 1), + (np.complex256, NpStrict1DArrayComplex256, 1), + (np.complex128, NpStrict1DArrayComplex128, 1), + (np.complex64, NpStrict1DArrayComplex64, 1), + (bool, NpStrict1DArrayBool, 1), + (np.datetime64, NpStrict1DArrayDatetime64, 1), + (np.timedelta64, NpStrict1DArrayTimedelta64, 1), +] +strict_data_type_2d_array_typing_dimensions = [ + (np.int64, NpStrict2DArrayInt64, 2), + (np.int32, NpStrict2DArrayInt32, 2), + (np.int16, NpStrict2DArrayInt16, 2), + (np.int8, NpStrict2DArrayInt8, 2), + (np.uint64, NpStrict2DArrayUint64, 2), + (np.uint32, NpStrict2DArrayUint32, 
2), + (np.uint16, NpStrict2DArrayUint16, 2), + (np.uint8, NpStrict2DArrayUint8, 2), + (np.float128, NpStrict2DArrayFp128, 2), + (np.float64, NpStrict2DArrayFp64, 2), + (np.float32, NpStrict2DArrayFp32, 2), + (np.float16, NpStrict2DArrayFp16, 2), + (np.complex256, NpStrict2DArrayComplex256, 2), + (np.complex128, NpStrict2DArrayComplex128, 2), + (np.complex64, NpStrict2DArrayComplex64, 2), + (bool, NpStrict2DArrayBool, 2), + (np.datetime64, NpStrict2DArrayDatetime64, 2), + (np.timedelta64, NpStrict2DArrayTimedelta64, 2), +] +strict_data_type_3d_array_typing_dimensions = [ + (np.int64, NpStrict3DArrayInt64, 3), + (np.int32, NpStrict3DArrayInt32, 3), + (np.int16, NpStrict3DArrayInt16, 3), + (np.int8, NpStrict3DArrayInt8, 3), + (np.uint64, NpStrict3DArrayUint64, 3), + (np.uint32, NpStrict3DArrayUint32, 3), + (np.uint16, NpStrict3DArrayUint16, 3), + (np.uint8, NpStrict3DArrayUint8, 3), + (np.float128, NpStrict3DArrayFp128, 3), + (np.float64, NpStrict3DArrayFp64, 3), + (np.float32, NpStrict3DArrayFp32, 3), + (np.float16, NpStrict3DArrayFp16, 3), + (np.complex256, NpStrict3DArrayComplex256, 3), + (np.complex128, NpStrict3DArrayComplex128, 3), + (np.complex64, NpStrict3DArrayComplex64, 3), + (bool, NpStrict3DArrayBool, 3), + (np.datetime64, NpStrict3DArrayDatetime64, 3), + (np.timedelta64, NpStrict3DArrayTimedelta64, 3), +] +strict_data_type_nd_array_typing_dimensions = [ + (np.int64, NpStrictNDArrayInt64, None), + (np.int32, NpStrictNDArrayInt32, None), + (np.int16, NpStrictNDArrayInt16, None), + (np.int8, NpStrictNDArrayInt8, None), + (np.uint64, NpStrictNDArrayUint64, None), + (np.uint32, NpStrictNDArrayUint32, None), + (np.uint16, NpStrictNDArrayUint16, None), + (np.uint8, NpStrictNDArrayUint8, None), + (np.float128, NpStrictNDArrayFp128, None), + (np.float64, NpStrictNDArrayFp64, None), + (np.float32, NpStrictNDArrayFp32, None), + (np.float16, NpStrictNDArrayFp16, None), + (np.complex256, NpStrictNDArrayComplex256, None), + (np.complex128, NpStrictNDArrayComplex128, 
None), + (np.complex64, NpStrictNDArrayComplex64, None), + (bool, NpStrictNDArrayBool, None), + (np.datetime64, NpStrictNDArrayDatetime64, None), + (np.timedelta64, NpStrictNDArrayTimedelta64, None), +] + +strict_data_type_array_typing_dimensions = [ + *strict_data_type_1d_array_typing_dimensions, + *strict_data_type_2d_array_typing_dimensions, + *strict_data_type_3d_array_typing_dimensions, + *strict_data_type_nd_array_typing_dimensions, +] + +dimension_testing_group = [ + (np.int64, Np1DArrayInt64, 1), + (np.int64, Np2DArrayInt64, 2), + (np.int64, Np3DArrayInt64, 3), +] diff --git a/tests/test_dtype.py b/tests/test_dtype.py deleted file mode 100644 index 9650257..0000000 --- a/tests/test_dtype.py +++ /dev/null @@ -1,68 +0,0 @@ -import sys - -import numpy as np -import pytest -from pydantic import BaseModel - -import pydantic_numpy.dtype as pnd - -try: - np_float128 = np.float128 -except AttributeError: - # Not available on windows. - np_float128 = None - -try: - np_complex256 = np.complex256 -except AttributeError: - # Not available on windows. 
- np_complex256 = None - - -@pytest.mark.parametrize("data", (1, 1.0)) -@pytest.mark.parametrize( - "pnp_dtype,np_dtype", - ( - (pnd.float16, np.float16), - (pnd.float32, np.float32), - (pnd.float64, np.float64), - pytest.param( - pnd.float128, - np_float128, - marks=pytest.mark.skipif(sys.platform == "win32", reason="dtype is not available on windows"), - ), - (pnd.int8, np.int8), - (pnd.int16, np.int16), - (pnd.int32, np.int32), - (pnd.int64, np.int64), - (pnd.uint8, np.uint8), - (pnd.uint16, np.uint16), - (pnd.uint32, np.uint32), - (pnd.uint64, np.uint64), - ), -) -def test_float32(data, pnp_dtype, np_dtype): - class MyModel(BaseModel): - V: pnp_dtype - - assert MyModel(V=data).V == np_dtype(data) - - -@pytest.mark.parametrize("data", (1 + 1j, 1.0 + 1.0j)) -@pytest.mark.parametrize( - "pnp_dtype,np_dtype", - ( - (pnd.complex64, np.complex64), - (pnd.complex128, np.complex128), - pytest.param( - pnd.complex256, - np_complex256, - marks=pytest.mark.skipif(sys.platform == "win32", reason="dtype is not available on windows"), - ), - ), -) -def test_complex256(data, pnp_dtype, np_dtype): - class MyModel(BaseModel): - V: pnp_dtype - - assert MyModel(V=data).V == np_dtype(data) diff --git a/tests/test_model.py b/tests/test_model.py deleted file mode 100644 index 9cd0732..0000000 --- a/tests/test_model.py +++ /dev/null @@ -1,84 +0,0 @@ -import shutil -from pathlib import Path -from typing import Union - -import numpy as np -import pytest - -from pydantic_numpy import NDArray, NDArrayBool -from pydantic_numpy.model import NumpyModel - -TEST_DUMP_PATH: Path = Path(__file__).absolute().parent / "delete_me_test_dump" -TEST_MODEL_OBJECT_ID = "test" - - -class NumpyModelForTest(NumpyModel): - array: NDArray - non_array: int - - -class TestWithArbitraryForTest(NumpyModelForTest): - my_arbitrary_slice: slice - - class Config: - arbitrary_types_allowed = True - - -numpy_bool_array: NDArrayBool = np.array([True, True, True, True, True], dtype=bool) - - -def _numpy_model(): - 
return NumpyModelForTest(array=numpy_bool_array, non_array=5) - - -@pytest.fixture -def numpy_model(): - return _numpy_model() - - -@pytest.fixture( - params=[ - _numpy_model(), - TestWithArbitraryForTest(array=numpy_bool_array, non_array=5, my_arbitrary_slice=slice(0, 10)), - ] -) -def numpy_model_with_arbitrary(request): - return request.param - - -def test_io_yaml(numpy_model: NumpyModel) -> None: - try: - TEST_DUMP_PATH.mkdir(exist_ok=True) - numpy_model.dump(TEST_DUMP_PATH, TEST_MODEL_OBJECT_ID) - _test_loaded_numpy_model(numpy_model.load(TEST_DUMP_PATH, TEST_MODEL_OBJECT_ID)) - finally: - _delete_leftovers(numpy_model) - - -def test_io_compressed_pickle(numpy_model_with_arbitrary: NumpyModel) -> None: - try: - TEST_DUMP_PATH.mkdir(exist_ok=True) - numpy_model_with_arbitrary.dump(TEST_DUMP_PATH, TEST_MODEL_OBJECT_ID, pickle=True) - _test_loaded_numpy_model(numpy_model_with_arbitrary.load(TEST_DUMP_PATH, TEST_MODEL_OBJECT_ID)) - - finally: - _delete_leftovers(numpy_model_with_arbitrary) - - -def test_io_pickle(numpy_model_with_arbitrary: NumpyModel) -> None: - try: - TEST_DUMP_PATH.mkdir(exist_ok=True) - numpy_model_with_arbitrary.dump(TEST_DUMP_PATH, TEST_MODEL_OBJECT_ID, pickle=True, compress=False) - _test_loaded_numpy_model(numpy_model_with_arbitrary.load(TEST_DUMP_PATH, TEST_MODEL_OBJECT_ID)) - finally: - _delete_leftovers(numpy_model_with_arbitrary) - - -def _test_loaded_numpy_model(model: Union[NumpyModelForTest, TestWithArbitraryForTest]) -> None: - assert np.all(model.array) and len(model.array) == 5 - if isinstance(model, TestWithArbitraryForTest): - assert isinstance(model.my_arbitrary_slice, slice) - - -def _delete_leftovers(model: NumpyModel) -> None: - shutil.rmtree(model.model_directory_path(TEST_DUMP_PATH, TEST_MODEL_OBJECT_ID)) diff --git a/tests/test_ndarray.py b/tests/test_ndarray.py deleted file mode 100644 index d22404b..0000000 --- a/tests/test_ndarray.py +++ /dev/null @@ -1,142 +0,0 @@ -from pathlib import Path -from typing import Dict, 
Optional - -import numpy as np -import pytest -from numpy.testing import assert_allclose -from pydantic import BaseModel, ValidationError - -import pydantic_numpy.dtype as pnd -from pydantic_numpy import NDArray -from pydantic_numpy.ndarray import NPFileDesc, PotentialNDArray - -JSON_ENCODERS = {np.ndarray: lambda arr: arr.tolist()} - - -class NDArrayTestingModel(BaseModel): - K: pnd.NDArrayFp32 - - class Config: - json_encoders = JSON_ENCODERS - - -def test_init_from_values(): - # Directly specify values - cfg = NDArrayTestingModel(K=[1, 2]) - assert_allclose(cfg.K, [1.0, 2.0]) - assert cfg.K.dtype == np.float32 - assert cfg.json() - - cfg = NDArrayTestingModel(K=np.eye(2)) - assert_allclose(cfg.K, [[1.0, 0], [0.0, 1.0]]) - assert cfg.K.dtype == np.float32 - - -def test_load_from_npy_path(tmpdir): - # Load from npy - np.save(Path(tmpdir) / "data.npy", np.arange(5)) - cfg = NDArrayTestingModel(K={"path": Path(tmpdir) / "data.npy"}) - assert_allclose(cfg.K, [0.0, 1.0, 2.0, 3.0, 4.0]) - assert cfg.K.dtype == np.float32 - - -def test_load_from_NPFileDesc(tmpdir): - np.save(Path(tmpdir) / "data.npy", np.arange(5)) - cfg = NDArrayTestingModel(K=NPFileDesc(path=Path(tmpdir) / "data.npy")) - assert_allclose(cfg.K, [0.0, 1.0, 2.0, 3.0, 4.0]) - assert cfg.K.dtype == np.float32 - - -def test_load_field_from_npz(tmpdir): - np.savez(Path(tmpdir) / "data.npz", values=np.arange(5)) - cfg = NDArrayTestingModel(K={"path": Path(tmpdir) / "data.npz", "key": "values"}) - assert_allclose(cfg.K, [0.0, 1.0, 2.0, 3.0, 4.0]) - assert cfg.K.dtype == np.float32 - - -def test_exceptional(tmpdir): - with pytest.raises(ValidationError): - NDArrayTestingModel(K={"path": Path(tmpdir) / "nosuchfile.npz", "key": "values"}) - - with pytest.raises(ValidationError): - NDArrayTestingModel(K={"path": Path(tmpdir) / "nosuchfile.npy", "key": "nosuchkey"}) - - with pytest.raises(ValidationError): - NDArrayTestingModel(K={"path": Path(tmpdir) / "nosuchfile.npy"}) - - with pytest.raises(ValidationError): - 
NDArrayTestingModel(K="absc") - - -def test_unspecified_npdtype(): - # Not specifying a dtype will use numpy default dtype resolver - - class NDArrayNoGeneric(BaseModel): - K: NDArray - - cfg = NDArrayNoGeneric(K=[1, 2]) - assert_allclose(cfg.K, [1, 2]) - assert cfg.K.dtype == int - - -def test_json_encoders(): - import json - - class NDArrayNoGeneric(BaseModel): - K: NDArray - - class Config: - json_encoders = JSON_ENCODERS - - cfg = NDArrayNoGeneric(K=[1, 2]) - jdata = json.loads(cfg.json()) - - assert "K" in jdata - assert type(jdata["K"]) == list - assert jdata["K"] == list([1, 2]) - - -def test_optional_construction(): - class NDArrayOptional(BaseModel): - K: Optional[pnd.NDArrayFp32] - - cfg = NDArrayOptional() - assert cfg.K is None - - cfg = NDArrayOptional(K=[1, 2]) - assert type(cfg.K) == np.ndarray - assert cfg.K.dtype == np.float32 - - -def test_potential_array(tmpdir): - class NDArrayPotential(BaseModel): - K: PotentialNDArray[float, pnd.float32] - - np.savez(Path(tmpdir) / "data.npz", values=np.arange(5)) - - cfg = NDArrayPotential(K={"path": Path(tmpdir) / "data.npz", "key": "values"}) - assert cfg.K is not None - assert_allclose(cfg.K, [0.0, 1.0, 2.0, 3.0, 4.0]) - - # Path not found - cfg = NDArrayPotential(K={"path": Path(tmpdir) / "nothere.npz", "key": "values"}) - assert cfg.K is None - - # Key not there - cfg = NDArrayPotential(K={"path": Path(tmpdir) / "data.npz", "key": "nothere"}) - assert cfg.K is None - - -def test_subclass_basemodel(): - model_field = NDArrayTestingModel(K=[1.0, 2.0]) - assert model_field.json() - - class MappingTestingModel(BaseModel): - L: Dict[str, NDArrayTestingModel] - - class Config: - json_encoders = JSON_ENCODERS - - model = MappingTestingModel(L={"a": NDArrayTestingModel(K=[1.0, 2.0])}) - assert model.L["a"].K.dtype == np.dtype("float32") - assert model.json() diff --git a/tests/test_np_model.py b/tests/test_np_model.py new file mode 100644 index 0000000..d62e79b --- /dev/null +++ b/tests/test_np_model.py @@ -0,0 
+1,96 @@ +import tempfile +from pathlib import Path + +import numpy as np +import pytest +from hypothesis.extra.numpy import arrays + +from pydantic_numpy.model import NumpyModel +from pydantic_numpy.model.np_model import model_agnostic_load +from pydantic_numpy.typing import NpNDArray + +TEST_MODEL_OBJECT_ID = "test" +OTHER_TEST_MODEL_OBJECT_ID = "other_test" +NON_ARRAY_VALUE = 5 + + +class NumpyModelForTest(NumpyModel): + array: NpNDArray + non_array: int + + +class TestWithArbitraryForTest(NumpyModelForTest, arbitrary_types_allowed=True): + my_arbitrary_slice: slice + + +def _create_example_array(): + return arrays(np.float64, (1,)).example() + + +def _numpy_model(): + return NumpyModelForTest(array=_create_example_array(), non_array=NON_ARRAY_VALUE) + + +@pytest.fixture +def numpy_model(): + return _numpy_model() + + +@pytest.fixture( + params=[ + _numpy_model(), + TestWithArbitraryForTest( + array=_create_example_array(), non_array=NON_ARRAY_VALUE, my_arbitrary_slice=slice(0, 10) + ), + ] +) +def numpy_model_with_arbitrary(request): + return request.param + + +def test_io_yaml(numpy_model: NumpyModel) -> None: + with tempfile.TemporaryDirectory() as tmp_dirname: + numpy_model.dump(tmp_dirname, TEST_MODEL_OBJECT_ID) + assert numpy_model.load(tmp_dirname, TEST_MODEL_OBJECT_ID) == numpy_model + + +def test_io_compressed_pickle(numpy_model_with_arbitrary: NumpyModel) -> None: + with tempfile.TemporaryDirectory() as tmp_dirname: + numpy_model_with_arbitrary.dump(tmp_dirname, TEST_MODEL_OBJECT_ID, pickle=True) + assert numpy_model_with_arbitrary.load(tmp_dirname, TEST_MODEL_OBJECT_ID) == numpy_model_with_arbitrary + + +def test_io_pickle(numpy_model_with_arbitrary: NumpyModel) -> None: + with tempfile.TemporaryDirectory() as tmp_dirname: + numpy_model_with_arbitrary.dump(tmp_dirname, TEST_MODEL_OBJECT_ID, pickle=True, compress=False) + assert numpy_model_with_arbitrary.load(tmp_dirname, TEST_MODEL_OBJECT_ID) == numpy_model_with_arbitrary + + +def 
test_typing_json_dump(numpy_model: NumpyModel): + assert numpy_model.model_dump_json() == '{"array":"%s","non_array":%s}' % ( + np.array2string(numpy_model.array), + NON_ARRAY_VALUE, + ), "" + + +def test_model_agnostic_load(): + class NumpyModelAForTest(NumpyModel): + array: NpNDArray + non_array: int + + class NumpyModelBForTest(NumpyModel): + array: NpNDArray + non_array: int + + model_a = NumpyModelAForTest(array=_create_example_array(), non_array=NON_ARRAY_VALUE) + model_b = NumpyModelBForTest(array=_create_example_array(), non_array=NON_ARRAY_VALUE) + + with tempfile.TemporaryDirectory() as tmp_dirname: + tmp_dir_path = Path(tmp_dirname) + + model_a.dump(tmp_dir_path, TEST_MODEL_OBJECT_ID) + model_b.dump(tmp_dir_path, OTHER_TEST_MODEL_OBJECT_ID) + + models = [model_a, model_b] + assert model_a == model_agnostic_load(tmp_dir_path, TEST_MODEL_OBJECT_ID, models=models) + assert model_b == model_agnostic_load(tmp_dir_path, OTHER_TEST_MODEL_OBJECT_ID, models=models) diff --git a/tests/test_typing.py b/tests/test_typing.py new file mode 100644 index 0000000..5ef3370 --- /dev/null +++ b/tests/test_typing.py @@ -0,0 +1,97 @@ +import tempfile +from pathlib import Path +from typing import Optional + +import numpy as np +import numpy.typing as npt +import pytest +from hypothesis.extra.numpy import arrays +from pydantic import ValidationError + +from pydantic_numpy.helper.validation import PydanticNumpyMultiArrayNumpyFileOnFilePath +from pydantic_numpy.model import MultiArrayNumpyFile +from pydantic_numpy.util import np_general_all_close +from tests.helper.cache import cached_calculation +from tests.helper.groups import ( + data_type_array_typing_dimensions, + dimension_testing_group, + strict_data_type_nd_array_typing_dimensions, + supported_data_types, +) + +AXIS_LENGTH = 1 + + +@pytest.mark.parametrize("numpy_dtype,pydantic_typing,dimensions", data_type_array_typing_dimensions) +def test_correct_type(numpy_dtype: npt.DTypeLike, pydantic_typing, dimensions: 
Optional[int]): + assert cached_calculation(pydantic_typing)( + array_field=arrays(numpy_dtype, tuple(AXIS_LENGTH for _ in range(dimensions or 1))).example() + ) + + +@pytest.mark.parametrize("numpy_dtype,pydantic_typing,dimensions", strict_data_type_nd_array_typing_dimensions) +@pytest.mark.parametrize("wrong_numpy_type", supported_data_types) +def test_wrong_dtype_type(numpy_dtype: npt.DTypeLike, pydantic_typing, dimensions: Optional[int], wrong_numpy_type): + if wrong_numpy_type == numpy_dtype: + return True + + bad_array = arrays(wrong_numpy_type, tuple(AXIS_LENGTH for _ in range(dimensions or 5))).example() + with pytest.raises(ValidationError): + cached_calculation(pydantic_typing)(array_field=bad_array) + + +@pytest.mark.parametrize("numpy_dtype,pydantic_typing,dimensions", dimension_testing_group) +def test_wrong_dimension(numpy_dtype: npt.DTypeLike, pydantic_typing, dimensions: Optional[int]): + wrong_dimension = dimensions + 1 + + bad_array = arrays(numpy_dtype, tuple(AXIS_LENGTH for _ in range(wrong_dimension or 5))).example() + with pytest.raises(ValueError): + cached_calculation(pydantic_typing)(array_field=bad_array) + + +@pytest.mark.parametrize("numpy_dtype,pydantic_typing,dimensions", data_type_array_typing_dimensions) +def test_file_path_passing_validation(numpy_dtype: npt.DTypeLike, pydantic_typing, dimensions: Optional[int]): + hyp_array = arrays(numpy_dtype, tuple(AXIS_LENGTH for _ in range(dimensions or 1))).example() + with tempfile.NamedTemporaryFile(mode="w+", delete=True, suffix=".npz") as tf: + np.savez_compressed(tf.name, my_array=hyp_array) + numpy_model = cached_calculation(pydantic_typing)(array_field=Path(tf.name)) + + assert np_general_all_close(numpy_model.array_field, hyp_array) + + +@pytest.mark.parametrize("numpy_dtype,pydantic_typing,dimensions", data_type_array_typing_dimensions) +def test_file_path_error_on_reading_single_array_file( + numpy_dtype: npt.DTypeLike, pydantic_typing, dimensions: Optional[int] +): + hyp_array = 
arrays(numpy_dtype, tuple(AXIS_LENGTH for _ in range(dimensions or 1))).example() + with tempfile.NamedTemporaryFile(mode="w+", delete=True, suffix=".npz") as tf: + np.savez_compressed(tf.name, my_array=hyp_array, my_identical_array=hyp_array) + model = cached_calculation(pydantic_typing) + + with pytest.raises(PydanticNumpyMultiArrayNumpyFileOnFilePath): + model(array_field=Path(tf.name)) + + +@pytest.mark.parametrize("numpy_dtype,pydantic_typing,dimensions", data_type_array_typing_dimensions) +def test_multi_array_numpy_passing_validation(numpy_dtype: npt.DTypeLike, pydantic_typing, dimensions: Optional[int]): + hyp_array = arrays(numpy_dtype, tuple(AXIS_LENGTH for _ in range(dimensions or 1))).example() + with tempfile.NamedTemporaryFile(mode="w+", delete=True, suffix=".npz") as tf: + np.savez_compressed(tf.name, my_array=hyp_array) + numpy_model = cached_calculation(pydantic_typing)( + array_field=MultiArrayNumpyFile(path=Path(tf.name), key="my_array") + ) + + assert np_general_all_close(numpy_model.array_field, hyp_array) + + +@pytest.mark.parametrize("numpy_dtype,pydantic_typing,dimensions", data_type_array_typing_dimensions) +def test_multi_array_numpy_error_on_reading_single_array_file( + numpy_dtype: npt.DTypeLike, pydantic_typing, dimensions: Optional[int] +): + hyp_array = arrays(numpy_dtype, tuple(AXIS_LENGTH for _ in range(dimensions or 1))).example() + with tempfile.NamedTemporaryFile(mode="w+", delete=True, suffix=".npy") as tf: + np.save(tf.name, hyp_array) + model = cached_calculation(pydantic_typing) + + with pytest.raises(AttributeError): + model(array_field=MultiArrayNumpyFile(path=Path(tf.name), key="my_array"))