
Commit 133db0d
feat(typegen): typegen.generate_typing tool for auto-generating types; has Justfile target
chore(dependency): Add Pyright static-analyzer dependency
test(pyright): Pyright testing can be run from the Justfile and GitHub workflows
chore(version): Bump PATCH version (4.1.1)
futurwasfree authored and Can H. Tartanoglu committed Dec 6, 2023
1 parent d9387d4 commit 133db0d
Showing 18 changed files with 1,030 additions and 258 deletions.
24 changes: 24 additions & 0 deletions .github/workflows/test.yaml
@@ -67,6 +67,30 @@ jobs:
           --strict-optional \
           -p pydantic_numpy
+  pyright:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        python-version: [ "3.9", "3.10", "3.11" ]
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup the Python Environment ${{ matrix.python-version }}
+        uses: Qwerty-133/python-setup@v1
+        with:
+          python-version: ${{ matrix.python-version }}
+          skip-pre-commit: true
+
+      - name: Install dependencies
+        run: |
+          poetry install --all-extras --with dev,typecheck
+      - name: Validate type-hints with pyright
+        run: |
+          poetry run pyright pydantic_numpy
   format:
     runs-on: ubuntu-latest
9 changes: 9 additions & 0 deletions Justfile
@@ -18,3 +18,12 @@ mypy:
 
 mypy_test:
     poetry run mypy tests/
+
+pyright:
+    poetry run pyright pydantic_numpy
+
+pyright_test:
+    poetry run pyright tests/
+
+typegen:
+    poetry run python typegen/generate_typing.py
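The new typegen target runs typegen/generate_typing.py, the generator named in the commit title. The script itself is not rendered in this diff; the following is a minimal hypothetical sketch of such a generator, reusing np_array_pydantic_annotated_typing from the annotation.py diff below. The alias names, dtype table, and output path are assumptions, not taken from this commit:

```python
# typegen/generate_typing.py -- hypothetical sketch of an alias generator.
# Emits one pydantic-ready NumPy array alias per (dimensions, dtype) pair.
from pathlib import Path

HEADER = '"""This file is auto-generated; do not edit by hand."""'

# Assumed dtype table; the real generator likely covers many more dtypes.
DTYPES = {"Fp32": "np.float32", "Fp64": "np.float64", "Int64": "np.int64"}


def main() -> None:
    lines = [
        HEADER,
        "import numpy as np",
        "from pydantic_numpy.helper.annotation import np_array_pydantic_annotated_typing",
        "",
    ]
    for dimensions in (1, 2, 3):
        for label, dtype in DTYPES.items():
            lines.append(
                f"Np{dimensions}DArray{label} = np_array_pydantic_annotated_typing("
                f"data_type={dtype}, dimensions={dimensions})"
            )
    Path("pydantic_numpy/typing.py").write_text("\n".join(lines) + "\n")


if __name__ == "__main__":
    main()
```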
56 changes: 54 additions & 2 deletions poetry.lock

Some generated files are not rendered by default.

12 changes: 6 additions & 6 deletions pydantic_numpy/helper/annotation.py
@@ -116,8 +116,8 @@ def np_array_pydantic_annotated_typing(
         FilePath,
         MultiArrayNumpyFile,
         np.ndarray[  # type: ignore[misc]
-            _int_to_dim_type[dimensions] if dimensions else Any,
-            np.dtype[data_type] if _data_type_resolver(data_type) else data_type,
+            _int_to_dim_type[dimensions] if dimensions else Any,  # pyright: ignore
+            np.dtype[data_type] if _data_type_resolver(data_type) else data_type,  # type: ignore[misc]
         ],
     ],
     NpArrayPydanticAnnotation.factory(
@@ -126,15 +126,15 @@
     ]
 
 
-def _data_type_resolver(data_type: DTypeLike):
-    return data_type is not None and issubclass(data_type, np.generic)
+def _data_type_resolver(data_type: DTypeLike) -> bool:
+    return data_type is not None and issubclass(data_type, np.generic)  # pyright: ignore
 
 
 def _serialize_numpy_array_to_data_dict(array: np.ndarray) -> NumpyDataDict:
     if issubclass(array.dtype.type, np.timedelta64) or issubclass(array.dtype.type, np.datetime64):
-        return dict(data_type=str(array.dtype), data=array.astype(int).tolist())
+        return NumpyDataDict(data_type=str(array.dtype), data=array.astype(int).tolist())
 
-    return dict(data_type=str(array.dtype), data=array.astype(float).tolist())
+    return NumpyDataDict(data_type=str(array.dtype), data=array.astype(float).tolist())
 
 
 @validate_call
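The switch from plain dict(...) to the NumpyDataDict constructor is what lets pyright check the declared return type of _serialize_numpy_array_to_data_dict. NumpyDataDict itself is not shown in this diff; judging by the call sites above, it is presumably a TypedDict along these lines (field types inferred, not confirmed by this commit):

```python
# Hypothetical reconstruction of NumpyDataDict, inferred from its call sites.
from typing import List, TypedDict, Union


class NumpyDataDict(TypedDict):
    data_type: str  # str(array.dtype), e.g. "float64" or "timedelta64[ns]"
    data: List[Union[int, float]]  # array.astype(int/float).tolist(); nested for ndim > 1
```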
7 changes: 4 additions & 3 deletions pydantic_numpy/helper/validation.py
@@ -1,4 +1,4 @@
-from typing import Callable, Optional
+from typing import Callable, Optional, cast
 
 import numpy as np
 import numpy.typing as npt
@@ -118,6 +118,7 @@ def _resolve_type_of_array_dtype(array_dtype: npt.DTypeLike) -> type:
         type
     """
     if hasattr(array_dtype, "type"):
-        return array_dtype.type
+        assert array_dtype is not None
+        return array_dtype.type  # pyright: ignore
     else:
-        return array_dtype
+        return cast(type, array_dtype)
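The assert/cast pair is a standard way to satisfy pyright's narrowing rules: hasattr does not narrow DTypeLike, which also admits None, so the assert rules out None before the attribute access, while cast records that the remaining branch already holds a plain type. A minimal standalone illustration of both idioms (not from this commit):

```python
from typing import Optional, cast


def shout(name: Optional[str]) -> str:
    # assert narrows Optional[str] to str, so .upper() type-checks.
    assert name is not None
    return name.upper()


def as_int(value: object) -> int:
    # cast() performs no runtime check; it only tells the checker
    # to treat `value` as int from here on.
    return cast(int, value)
```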
12 changes: 6 additions & 6 deletions pydantic_numpy/model.py
@@ -99,12 +99,12 @@ def load(
         npz_file = np.load(object_directory_path / cls._dump_numpy_savez_file_name)
 
         other_path: FilePath
-        if (other_path := object_directory_path / cls._dump_compressed_pickle_file_name).exists():
+        if (other_path := object_directory_path / cls._dump_compressed_pickle_file_name).exists():  # pyright: ignore
             other_field_to_value = compress_pickle.load(other_path)
-        elif (other_path := object_directory_path / cls._dump_pickle_file_name).exists():
+        elif (other_path := object_directory_path / cls._dump_pickle_file_name).exists():  # pyright: ignore
             with open(other_path, "rb") as in_pickle:
                 other_field_to_value = pickle_pkg.load(in_pickle)
-        elif (other_path := object_directory_path / cls._dump_non_array_yaml_name).exists():
+        elif (other_path := object_directory_path / cls._dump_non_array_yaml_name).exists():  # pyright: ignore
             with open(other_path, "r") as in_yaml:
                 other_field_to_value = yaml.load(in_yaml)
         else:
@@ -139,15 +139,15 @@ def dump(
             if compress:
                 compress_pickle.dump(
                     other_field_to_value,
-                    dump_directory_path / self._dump_compressed_pickle_file_name,
+                    dump_directory_path / self._dump_compressed_pickle_file_name,  # pyright: ignore
                     compression=self._dump_compression,
                 )
             else:
-                with open(dump_directory_path / self._dump_pickle_file_name, "wb") as out_pickle:
+                with open(dump_directory_path / self._dump_pickle_file_name, "wb") as out_pickle:  # pyright: ignore
                     pickle_pkg.dump(other_field_to_value, out_pickle)
 
         else:
-            with open(dump_directory_path / self._dump_non_array_yaml_name, "w") as out_yaml:
+            with open(dump_directory_path / self._dump_non_array_yaml_name, "w") as out_yaml:  # pyright: ignore
                 yaml.dump(other_field_to_value, out_yaml)
 
         return dump_directory_path
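The # pyright: ignore comments in load and dump all stem from the same mismatch: other_path is annotated as Pydantic's FilePath, but Path.__truediv__ returns a plain pathlib.Path, which pyright reports as an incompatible assignment in the walrus expressions. A minimal reproduction of the pattern (directory and file names hypothetical):

```python
from pathlib import Path

from pydantic import FilePath

directory = Path("/tmp/model-dump")

other_path: FilePath
# Path / str yields Path, not FilePath, hence the ignores in the diff.
if (other_path := directory / "arrays.pickle.lzma").exists():  # pyright: ignore
    print("compressed pickle found:", other_path)
elif (other_path := directory / "object_info.yaml").exists():  # pyright: ignore
    print("yaml fallback found:", other_path)
```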
