From d3e05f9f1eb10913f1501cdecfe8f65856af3f95 Mon Sep 17 00:00:00 2001
From: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>
Date: Sat, 23 Apr 2022 02:51:59 -0500
Subject: [PATCH 1/3] chore: run nightly tf in CI and fix compatibility with
 TF 2.9.0

---
 .github/workflows/tests.yaml | 11 +++++++----
 pyproject.toml               |  8 +++-----
 scikeras/wrappers.py         |  5 +++++
 3 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index 97eb4e83..df788b38 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -4,6 +4,9 @@ on:
   push:
     branches: [ master ]
   pull_request:
+  schedule:
+    # run every day at midnight
+    - cron: "0 0 * * *"
 
 jobs:
   Linting:
@@ -26,7 +29,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.7, 3.8]
+        python-version: ["3.7", "3.8", "3.9"]
 
     steps:
     - uses: actions/checkout@v2
@@ -58,7 +61,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
      matrix:
-        python-version: [3.8]
+        python-version: ["3.9"]
 
     steps:
     - uses: actions/checkout@v2
@@ -100,7 +103,7 @@ jobs:
     strategy:
       matrix:
         tf-version: [2.7.0]
-        python-version: [3.7, 3.8]
+        python-version: ["3.7", "3.9"]
         sklearn-version: [1.0.0]
 
     steps:
@@ -135,7 +138,7 @@ jobs:
     strategy:
       matrix:
         os: [MacOS, Windows] # test all OSs (except Ubuntu, which is already running other tests)
-        python-version: [3.7, 3.9] # test only the two extremes of supported Python versions
+        python-version: ["3.7", "3.9"] # test only the two extremes of supported Python versions
 
     steps:
     - uses: actions/checkout@v2
diff --git a/pyproject.toml b/pyproject.toml
index 76f36867..0d0af515 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,7 +29,7 @@ repository = "https://github.com/adriangb/scikeras"
 version = "0.6.1"
 
 [tool.poetry.dependencies]
-importlib-metadata = {version = "^3", python = "<3.8"}
+importlib-metadata = {version = ">=3", python = "<3.8"}
 python = ">=3.7.0,<3.10.0"
 scikit-learn = ">=1.0.0"
 packaging = ">=0.21,<22.0"
@@ -43,7 +43,6 @@ tensorflow-cpu = ["tensorflow-cpu"]
 [tool.poetry.dev-dependencies]
 tensorflow = ">=2.7.0"
 coverage = {extras = ["toml"], version = ">=5.4"}
-dataclasses = {version = "^0.8", python = "<3.7"}
 insipid-sphinx-theme = ">=0.2.2"
 ipykernel = ">=5.4.2"
 jupyter = ">=1.0.0"
@@ -54,7 +53,6 @@ numpydoc = ">=1.1.0"
 pre-commit = ">=2.10.1"
 pytest = ">=6.2.2"
 pytest-cov = ">=2.11.1"
-pytest-sugar = "v0.9.4"
 sphinx = ">=3.2.1"
 
 [tool.isort]
@@ -84,5 +82,5 @@ source = ["scikeras/"]
 show_missing = true
 
 [build-system]
-build-backend = "poetry.masonry.api"
-requires = ["poetry>=1.0.10"]
+build-backend = "poetry.core.masonry.api"
+requires = ["poetry-core>=1.0.8"]
diff --git a/scikeras/wrappers.py b/scikeras/wrappers.py
index c5afbb42..4badcf8b 100644
--- a/scikeras/wrappers.py
+++ b/scikeras/wrappers.py
@@ -648,6 +648,11 @@ def _check_array_dtype(arr, force_numeric):
                     f"X has {len(X_shape_)} dimensions, but this {self.__name__}"
                     f" is expecting {len(self.X_shape_)} dimensions in X."
                 )
+            if X_shape_[1:] != self.X_shape_[1:]:
+                raise ValueError(
+                    f"X has shape {X_shape_[1:]}, but this {self.__name__}"
+                    f" is expecting X of shape {self.X_shape_[1:]}"
+                )
         return X, y
 
     def _type_of_target(self, y: np.ndarray) -> str:

From feff2de87a56adc1cd6da3e519301e2551af082c Mon Sep 17 00:00:00 2001
From: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>
Date: Sat, 23 Apr 2022 02:55:58 -0500
Subject: [PATCH 2/3] update pre-commit

---
 .pre-commit-config.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e80f5374..562a1f3c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,11 +1,11 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: 19.10b0
+    rev: 22.3.0
     hooks:
       - id: black
 
   - repo: https://github.com/timothycrosley/isort
-    rev: 5.3.2
+    rev: 5.10.1
     hooks:
       - id: isort
         additional_dependencies: [toml]

From 54321d646caf7ac94359001f03c8c2f4ca0c2590 Mon Sep 17 00:00:00 2001
From: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>
Date: Sat, 23 Apr 2022 03:01:13 -0500
Subject: [PATCH 3/3] black reformatted a ton of stuff

---
 docs/source/conf.py               |   3 +-
 scikeras/_utils.py                |  25 ++++--
 scikeras/wrappers.py              |  28 ++++--
 tests/mlp_models.py               |   3 +-
 tests/multi_output_models.py      |   6 +-
 tests/test_api.py                 | 144 ++++++++++++++++++++++--------
 tests/test_basewrapper.py         |   6 +-
 tests/test_callbacks.py           |   9 +-
 tests/test_compile_kwargs.py      |  48 ++++++++--
 tests/test_errors.py              |  40 ++++++---
 tests/test_input_outputs.py       |  27 +++---
 tests/test_param_routing.py       |  24 ++++-
 tests/test_parameters.py          |  22 ++++-
 tests/test_scikit_learn_checks.py |  14 ++-
 tests/test_serialization.py       |  58 +++++++-----
 15 files changed, 322 insertions(+), 135 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index df28bc9e..e83adca2 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -31,8 +31,7 @@
 
 
 def maybe_skip_member(app, what, name, obj, skip, options) -> bool:
-    """Skip all private members, including __init__
-    """
+    """Skip all private members, including __init__"""
     if name.startswith("_"):
         return True
     return skip
diff --git a/scikeras/_utils.py b/scikeras/_utils.py
index ab0f23d3..142946f4 100644
--- a/scikeras/_utils.py
+++ b/scikeras/_utils.py
@@ -77,8 +77,7 @@ def has_param(func: Callable, param: str) -> bool:
 
 
 def accepts_kwargs(func: Callable) -> bool:
-    """Check if ``func`` accepts kwargs.
-    """
+    """Check if ``func`` accepts kwargs."""
     return any(
         True
         for param in inspect.signature(func).parameters.values()
@@ -99,7 +98,10 @@ def unflatten_params(items, params, base_params=None):
             args_and_kwargs[p] = unflatten_params(
                 items=v,
                 params=route_params(
-                    params=params, destination=f"{p}", pass_filter=set(), strict=False,
+                    params=params,
+                    destination=f"{p}",
+                    pass_filter=set(),
+                    strict=False,
                 ),
             )
         kwargs = {k: v for k, v in args_and_kwargs.items() if k[0] not in DIGITS}
@@ -112,7 +114,10 @@ def unflatten_params(items, params, base_params=None):
         new_base_params = {p: v for p, v in params.items() if "__" not in p}
         for idx, item in enumerate(items):
             item_params = route_params(
-                params=params, destination=f"{idx}", pass_filter=set(), strict=False,
+                params=params,
+                destination=f"{idx}",
+                pass_filter=set(),
+                strict=False,
             )
             res.append(
                 unflatten_params(
@@ -125,10 +130,15 @@ def unflatten_params(items, params, base_params=None):
         new_base_params = {p: v for p, v in params.items() if "__" not in p}
         for key, item in items.items():
             item_params = route_params(
-                params=params, destination=f"{key}", pass_filter=set(), strict=False,
+                params=params,
+                destination=f"{key}",
+                pass_filter=set(),
+                strict=False,
             )
             res[key] = unflatten_params(
-                items=item, params=item_params, base_params=new_base_params,
+                items=item,
+                params=item_params,
+                base_params=new_base_params,
             )
         return res
     # non-compilable item, check if it has any routed parameters
@@ -174,8 +184,7 @@ def get_loss_class_function_or_string(loss: str) -> Union[losses_mod.Loss, Calla
 def try_to_convert_strings_to_classes(
     items: Union[str, dict, tuple, list], class_getter: Callable
 ):
-    """Convert shorthand optimizer/loss/metric names to classes.
-    """
+    """Convert shorthand optimizer/loss/metric names to classes."""
     if isinstance(items, str):
         return class_getter(items)  # single item, despite parameter name
     elif isinstance(items, Sequence):
diff --git a/scikeras/wrappers.py b/scikeras/wrappers.py
index 4badcf8b..f885e23a 100644
--- a/scikeras/wrappers.py
+++ b/scikeras/wrappers.py
@@ -261,7 +261,8 @@ def current_epoch(self) -> int:
 
     @staticmethod
     def _validate_sample_weight(
-        X: np.ndarray, sample_weight: Union[np.ndarray, Iterable],
+        X: np.ndarray,
+        sample_weight: Union[np.ndarray, Iterable],
     ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
         """Validate that the passed sample_weight and ensure it is a Numpy array."""
         sample_weight = _check_sample_weight(
@@ -331,7 +332,9 @@ def _get_compile_kwargs(self):
         """
         init_params = self.get_params()
         compile_kwargs = route_params(
-            init_params, destination="compile", pass_filter=self._compile_kwargs,
+            init_params,
+            destination="compile",
+            pass_filter=self._compile_kwargs,
         )
         compile_kwargs["optimizer"] = try_to_convert_strings_to_classes(
             compile_kwargs["optimizer"], get_optimizer_class
@@ -339,7 +342,10 @@ def _get_compile_kwargs(self):
         compile_kwargs["optimizer"] = unflatten_params(
             items=compile_kwargs["optimizer"],
             params=route_params(
-                init_params, destination="optimizer", pass_filter=set(), strict=True,
+                init_params,
+                destination="optimizer",
+                pass_filter=set(),
+                strict=True,
             ),
         )
         compile_kwargs["loss"] = try_to_convert_strings_to_classes(
@@ -348,7 +354,10 @@ def _get_compile_kwargs(self):
         compile_kwargs["loss"] = unflatten_params(
             items=compile_kwargs["loss"],
             params=route_params(
-                init_params, destination="loss", pass_filter=set(), strict=False,
+                init_params,
+                destination="loss",
+                pass_filter=set(),
+                strict=False,
             ),
         )
         compile_kwargs["metrics"] = try_to_convert_strings_to_classes(
@@ -357,7 +366,10 @@ def _get_compile_kwargs(self):
         compile_kwargs["metrics"] = unflatten_params(
             items=compile_kwargs["metrics"],
             params=route_params(
-                init_params, destination="metrics", pass_filter=set(), strict=False,
+                init_params,
+                destination="metrics",
+                pass_filter=set(),
+                strict=False,
             ),
         )
         return compile_kwargs
@@ -728,7 +740,11 @@ def fit(self, X, y, sample_weight=None, **kwargs) -> "BaseWrapper":
         kwargs["initial_epoch"] = kwargs.get("initial_epoch", 0)
 
         self._fit(
-            X=X, y=y, sample_weight=sample_weight, warm_start=self.warm_start, **kwargs,
+            X=X,
+            y=y,
+            sample_weight=sample_weight,
+            warm_start=self.warm_start,
+            **kwargs,
         )
 
         return self
diff --git a/tests/mlp_models.py b/tests/mlp_models.py
index 4ffa845a..15114f54 100644
--- a/tests/mlp_models.py
+++ b/tests/mlp_models.py
@@ -60,8 +60,7 @@ def dynamic_regressor(
     meta: Optional[Dict[str, Any]] = None,
     compile_kwargs: Optional[Dict[str, Any]] = None,
 ) -> Model:
-    """Creates a basic MLP regressor dynamically.
-    """
+    """Creates a basic MLP regressor dynamically."""
     # get parameters
     n_features_in_ = meta["n_features_in_"]
     n_outputs_ = meta["n_outputs_"]
diff --git a/tests/multi_output_models.py b/tests/multi_output_models.py
index 276cdd39..c156b5ca 100644
--- a/tests/multi_output_models.py
+++ b/tests/multi_output_models.py
@@ -11,7 +11,8 @@
 
 class MultiLabelTransformer(ClassifierLabelEncoder):
     def __init__(
-        self, split: bool = True,
+        self,
+        split: bool = True,
     ):
         super().__init__()
         self.split = split
@@ -79,8 +80,7 @@ def target_encoder(self) -> MultiLabelTransformer:
         return MultiLabelTransformer(split=self.split)
 
     def score(self, X, y):
-        """Taken from sklearn.multiouput.MultiOutputClassifier
-        """
+        """Taken from sklearn.multiouput.MultiOutputClassifier"""
         if self.target_type_ != "multilabel-indicator":
             return super().score(X, y)
         return np.mean(np.all(y == self.predict(X), axis=1))
diff --git a/tests/test_api.py b/tests/test_api.py
index 6a521afc..4c251bc7 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -33,7 +33,9 @@
 
 
 def build_fn_clf(
-    hidden_dim, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    hidden_dim,
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
     """Builds a Sequential based classifier."""
     # extract parameters
@@ -55,7 +57,9 @@ def build_fn_clf(
 
 
 def build_fn_reg(
-    hidden_dim, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    hidden_dim,
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
     """Builds a Sequential based regressor."""
     # extract parameters
@@ -74,19 +78,29 @@ class InheritClassBuildFnClf(KerasClassifier):
     def _keras_build_fn(
-        self, hidden_dim, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+        self,
+        hidden_dim,
+        meta: Dict[str, Any],
+        compile_kwargs: Dict[str, Any],
     ) -> Model:
         return build_fn_clf(
-            hidden_dim=hidden_dim, meta=meta, compile_kwargs=compile_kwargs,
+            hidden_dim=hidden_dim,
+            meta=meta,
+            compile_kwargs=compile_kwargs,
         )
 
 
 class InheritClassBuildFnReg(KerasRegressor):
     def _keras_build_fn(
-        self, hidden_dim, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+        self,
+        hidden_dim,
+        meta: Dict[str, Any],
+        compile_kwargs: Dict[str, Any],
     ) -> Model:
         return build_fn_reg(
-            hidden_dim=hidden_dim, meta=meta, compile_kwargs=compile_kwargs,
+            hidden_dim=hidden_dim,
+            meta=meta,
+            compile_kwargs=compile_kwargs,
         )
@@ -112,7 +126,10 @@ def test_regression_build_fn(self):
 
     def test_regression_inherit_class_build_fn(self):
         """Tests for errors using KerasRegressor inherited."""
-        reg = InheritClassBuildFnReg(model=None, hidden_dim=5,)
+        reg = InheritClassBuildFnReg(
+            model=None,
+            hidden_dim=5,
+        )
         basic_checks(reg, fetch_california_housing)
 
@@ -128,7 +145,9 @@ def load_digits8x8():
 
 
 def build_fn_regs(
-    hidden_layer_sizes, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    hidden_layer_sizes,
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
     """Dynamically build regressor."""
     # get params
@@ -145,7 +164,9 @@ def build_fn_regs(
 
 
 def build_fn_clss(
-    hidden_layer_sizes, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    hidden_layer_sizes,
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
     """Dynamically build classifier."""
     # get params
@@ -161,7 +182,9 @@ def build_fn_clss(
 
 
 def build_fn_clscs(
-    hidden_layer_sizes, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    hidden_layer_sizes,
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
     """Dynamically build functional API regressor."""
     # get params
@@ -179,7 +202,9 @@ def build_fn_clscs(
 
 
 def build_fn_clscf(
-    hidden_layer_sizes, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    hidden_layer_sizes,
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
     """Dynamically build functional API classifier."""
     # get params
@@ -229,7 +254,8 @@ class TestAdvancedAPIFuncs:
     """Tests advanced features such as pipelines and hyperparameter tuning."""
 
     @pytest.mark.parametrize(
-        "config", ["MLPRegressor", "MLPClassifier", "CNNClassifier", "CNNClassifierF"],
+        "config",
+        ["MLPRegressor", "MLPClassifier", "CNNClassifier", "CNNClassifierF"],
     )
     def test_standalone(self, config):
         """Tests standalone estimator."""
@@ -246,16 +272,21 @@ def test_pipeline(self, config):
         basic_checks(estimator, loader)
 
     @pytest.mark.parametrize(
-        "config", ["MLPRegressor", "MLPClassifier", "CNNClassifier", "CNNClassifierF"],
+        "config",
+        ["MLPRegressor", "MLPClassifier", "CNNClassifier", "CNNClassifierF"],
     )
     def test_searchcv_init_params(self, config):
         """Tests compatibility with Scikit-learn's hyperparameter search CV."""
         loader, model, build_fn, _ = CONFIG[config]
         estimator = model(
-            build_fn, epochs=1, validation_split=0.1, model__hidden_layer_sizes=[],
+            build_fn,
+            epochs=1,
+            validation_split=0.1,
+            model__hidden_layer_sizes=[],
         )
         basic_checks(
-            GridSearchCV(estimator, {"model__hidden_layer_sizes": [[], [5]]}), loader,
+            GridSearchCV(estimator, {"model__hidden_layer_sizes": [[], [5]]}),
+            loader,
         )
         basic_checks(
             RandomizedSearchCV(
@@ -267,7 +298,8 @@ def test_searchcv_init_params(self, config):
         )
 
     @pytest.mark.parametrize(
-        "config", ["MLPClassifier"],
+        "config",
+        ["MLPClassifier"],
     )
     def test_searchcv_routed_params(self, config):
         """Tests compatibility with Scikit-learn's hyperparameter search CV."""
@@ -279,7 +311,10 @@ def test_searchcv_routed_params(self, config):
         }
         search = GridSearchCV(estimator, params)
         basic_checks(search, loader)
-        assert search.best_estimator_.model_.optimizer._name.lower() in ("sgd", "adam",)
+        assert search.best_estimator_.model_.optimizer._name.lower() in (
+            "sgd",
+            "adam",
+        )
 
     @pytest.mark.parametrize("config", ["MLPRegressor", "MLPClassifier"])
     def test_ensemble(self, config):
@@ -305,7 +340,8 @@ class TestPrebuiltModel:
     """Tests using a prebuilt model instance."""
 
     @pytest.mark.parametrize(
-        "config", ["MLPRegressor", "MLPClassifier"],
+        "config",
+        ["MLPRegressor", "MLPClassifier"],
     )
     def test_basic(self, config):
         """Tests using a prebuilt model."""
@@ -326,7 +362,11 @@ def test_basic(self, config):
             keras_model = build_fn(
                 meta=meta,
                 hidden_layer_sizes=(100,),
-                compile_kwargs={"optimizer": "adam", "loss": None, "metrics": None,},
+                compile_kwargs={
+                    "optimizer": "adam",
+                    "loss": None,
+                    "metrics": None,
+                },
             )
         else:
             meta = {
@@ -336,7 +376,11 @@ def test_basic(self, config):
             keras_model = build_fn(
                 meta=meta,
                 hidden_layer_sizes=(100,),
-                compile_kwargs={"optimizer": "adam", "loss": None, "metrics": None,},
+                compile_kwargs={
+                    "optimizer": "adam",
+                    "loss": None,
+                    "metrics": None,
+                },
             )
 
         estimator = model(model=keras_model)
@@ -362,7 +406,11 @@ def test_ensemble(self, config):
             keras_model = build_fn(
                 meta=meta,
                 hidden_layer_sizes=(100,),
-                compile_kwargs={"optimizer": "adam", "loss": None, "metrics": None,},
+                compile_kwargs={
+                    "optimizer": "adam",
+                    "loss": None,
+                    "metrics": None,
+                },
             )
         else:
             meta = {
@@ -372,7 +420,11 @@ def test_ensemble(self, config):
             keras_model = build_fn(
                 meta=meta,
                 hidden_layer_sizes=(100,),
-                compile_kwargs={"optimizer": "adam", "loss": None, "metrics": None,},
+                compile_kwargs={
+                    "optimizer": "adam",
+                    "loss": None,
+                    "metrics": None,
+                },
             )
 
         base_estimator = model(model=keras_model)
@@ -388,7 +440,8 @@ def test_warm_start():
     X, y = data.data[:100], data.target[:100]
     # Initial fit
     estimator = KerasRegressor(
-        model=dynamic_regressor, model__hidden_layer_sizes=(100,),
+        model=dynamic_regressor,
+        model__hidden_layer_sizes=(100,),
     )
     estimator.fit(X, y)
     model = estimator.model_
@@ -418,7 +471,10 @@ def test_partial_fit(self):
         data = fetch_california_housing()
         X, y = data.data[:100], data.target[:100]
         estimator = KerasRegressor(
-            model=dynamic_regressor, model__hidden_layer_sizes=[100,],
+            model=dynamic_regressor,
+            model__hidden_layer_sizes=[
+                100,
+            ],
         )
 
         estimator.partial_fit(X, y)
@@ -436,7 +492,9 @@ def test_partial_fit_history_metric_names(self):
         X, y = data.data[:100], data.target[:100]
         estimator = KerasRegressor(
             model=dynamic_regressor,
-            model__hidden_layer_sizes=[100,],
+            model__hidden_layer_sizes=[
+                100,
+            ],
             metrics=["mse", CustomMetric(name="custom_metric")],
         )
         estimator.partial_fit(X, y)
@@ -462,7 +520,9 @@ def test_partial_fit_history_len(self):
         estimator = KerasRegressor(
             model=dynamic_regressor,
             metrics="mean_squared_error",
-            model__hidden_layer_sizes=[100,],
+            model__hidden_layer_sizes=[
+                100,
+            ],
         )
 
         for k in range(10):
@@ -483,7 +543,11 @@ def test_partial_fit_single_epoch(self):
         partial_fit_iter = 4
 
         estimator = KerasRegressor(
-            model=dynamic_regressor, model__hidden_layer_sizes=[100,], epochs=epochs,
+            model=dynamic_regressor,
+            model__hidden_layer_sizes=[
+                100,
+            ],
+            epochs=epochs,
         )
 
         # Check that each partial_fit call trains for 1 epoch
@@ -536,7 +600,8 @@ def test_current_epoch_property(self, warm_start, epochs_prefix):
         assert estimator.current_epoch == epochs + 1
 
     @pytest.mark.parametrize(
-        "config", ["CNNClassifier", "CNNClassifierF"],
+        "config",
+        ["CNNClassifier", "CNNClassifierF"],
     )
     def test_pf_pickle_pf(self, config):
         loader, model, build_fn, _ = CONFIG[config]
@@ -592,7 +657,9 @@ def test_partial_fit_classes_param(self):
         clf = KerasClassifier(
             model=dynamic_classifier,
             loss="sparse_categorical_crossentropy",
-            model__hidden_layer_sizes=[100,],
+            model__hidden_layer_sizes=[
+                100,
+            ],
         )
         X1 = np.array([[1, 2, 3], [4, 5, 6]]).T
         y1 = np.array([1, 2, 2])
@@ -621,8 +688,7 @@ def force_compile_shorthand(hidden_layer_sizes, meta, compile_kwargs, params):
 
 class TestHistory:
     def test_history(self):
-        """Test that history_'s keys are strings and values are lists.
-        """
+        """Test that history_'s keys are strings and values are lists."""
         data = fetch_california_housing()
         X, y = data.data[:100], data.target[:100]
         estimator = KerasRegressor(
@@ -717,13 +783,18 @@ def __init__(self, *args, loss=losses_module.CosineSimilarity(), **kwargs):
             super().__init__(*args, **kwargs, loss=loss)
 
     loss_obj = other_loss()
-    estimator = DefaultLossNotNone(model=build_fn, my_loss=loss_obj,)
+    estimator = DefaultLossNotNone(
+        model=build_fn,
+        my_loss=loss_obj,
+    )
     estimator.fit(X, y)
     assert estimator.model_.loss is loss_obj
 
     loss_obj = other_loss()
     estimator = DefaultLossNotNone(
-        model=build_fn, loss=losses_module.CategoricalHinge(), my_loss=loss_obj,
+        model=build_fn,
+        loss=losses_module.CategoricalHinge(),
+        my_loss=loss_obj,
     )
     with pytest.raises(ValueError, match=" but model compiled with "):
         estimator.fit(X, y)
@@ -755,8 +826,7 @@ def _keras_build_fn(self):
 
 
 class TestInitialize:
-    """Test the ``initialize`` method.
-    """
+    """Test the ``initialize`` method."""
 
     @pytest.mark.parametrize("wrapper", [KerasClassifier, KerasRegressor])
     def test_prebuilt_model(self, wrapper):
@@ -778,7 +848,9 @@ def test_prebuilt_model(self, wrapper):
         # Save Keras prediction
         y_pred_keras = m1.predict(X)
         # Keras outputs 2D despite input being 1D; reshape to match input
-        y_pred_keras = y_pred_keras.reshape(-1,)
+        y_pred_keras = y_pred_keras.reshape(
+            -1,
+        )
         # Extract the weights into a copy of the model
         weights = m1.get_weights()
         m2 = keras.models.clone_model(m1)
diff --git a/tests/test_basewrapper.py b/tests/test_basewrapper.py
index 070241fe..f5886250 100644
--- a/tests/test_basewrapper.py
+++ b/tests/test_basewrapper.py
@@ -11,8 +11,7 @@
 
 
 class AutoEncoderTransformer(BaseWrapper, TransformerMixin):
-    """Enables the ``transform`` and ``fit_transform`` methods.
-    """
+    """Enables the ``transform`` and ``fit_transform`` methods."""
 
     def fit(self, X):
         self.initialize(X)
@@ -24,8 +23,7 @@ def transform(self, X):
 
 class TestAutoencoder:
     def test_simple_autoencoder_mnist(self):
-        """Tests an autoencoder following.
-        """
+        """Tests an autoencoder following."""
         # Data
         (x_train, _), (x_test, _) = keras.datasets.mnist.load_data()
         x_train = x_train.astype("float32") / 255.0
diff --git a/tests/test_callbacks.py b/tests/test_callbacks.py
index be3dc6e3..15f3bad2 100644
--- a/tests/test_callbacks.py
+++ b/tests/test_callbacks.py
@@ -10,8 +10,7 @@
 
 
 def test_callbacks_prefixes():
-    """Test dispatching of callbacks using no prefix, the fit__ prefix or the predict__ prefix.
-    """
+    """Test dispatching of callbacks using no prefix, the fit__ prefix or the predict__ prefix."""
 
     class SentinalCallback(Callback):
         def __init__(self, call_logs: DefaultDict[str, int]):
@@ -93,8 +92,7 @@ def get_clf() -> keras.Model:
     ],
 )
 def test_callback_param_routing_syntax(callback_kwargs: Dict[str, Any]):
-    """Test support for the various parameter routing syntaxes for callbacks.
-    """
+    """Test support for the various parameter routing syntaxes for callbacks."""
 
     def get_clf() -> keras.Model:
         model = keras.models.Sequential()
@@ -115,8 +113,7 @@ def get_clf() -> keras.Model:
 
 
 def test_callback_compiling_args_or_kwargs():
-    """Test compiling callbacks with routed positional (args) or keyword (kwargs) arguments.
-    """
+    """Test compiling callbacks with routed positional (args) or keyword (kwargs) arguments."""
 
     def get_clf() -> keras.Model:
         model = keras.models.Sequential()
diff --git a/tests/test_compile_kwargs.py b/tests/test_compile_kwargs.py
index 23759aa4..34f67892 100644
--- a/tests/test_compile_kwargs.py
+++ b/tests/test_compile_kwargs.py
@@ -62,7 +62,9 @@ def test_optimizer_invalid_string():
     optimizer = "sgf"  # sgf is not a loss
 
     est = KerasClassifier(
-        model=get_model, optimizer=optimizer, loss="binary_crossentropy",
+        model=get_model,
+        optimizer=optimizer,
+        loss="binary_crossentropy",
     )
     with pytest.raises(ValueError, match="Unknown optimizer"):
         est.fit(X, y)
@@ -102,7 +104,11 @@ def __call__(self, y_true, y_pred, sample_weight=None):
 
 
 @pytest.mark.parametrize(
-    "loss", (losses_module.BinaryCrossentropy, "BinaryCrossentropy",),
+    "loss",
+    (
+        losses_module.BinaryCrossentropy,
+        "BinaryCrossentropy",
+    ),
 )
 @pytest.mark.parametrize("n_outputs_", (1, 2))
 def test_loss(loss, n_outputs_):
@@ -127,7 +133,11 @@ def test_loss_invalid_string():
 
     loss = "binary_crossentropr"  # binary_crossentropr is not a loss
 
-    est = KerasClassifier(model=get_model, num_hidden=20, loss=loss,)
+    est = KerasClassifier(
+        model=get_model,
+        num_hidden=20,
+        loss=loss,
+    )
     with pytest.raises(ValueError, match="Unknown loss function"):
         est.fit(X, y)
@@ -141,7 +151,11 @@ def test_loss_uncompilable():
 
     loss = losses_module.binary_crossentropy
 
-    est = KerasClassifier(model=get_model, loss=loss, loss__from_logits=True,)
+    est = KerasClassifier(
+        model=get_model,
+        loss=loss,
+        loss__from_logits=True,
+    )
     with pytest.raises(
         TypeError, match="does not accept parameters because it's not a class"
     ):
@@ -149,7 +163,11 @@ def test_loss_uncompilable():
 
 
 @pytest.mark.parametrize(
-    "loss", (losses_module.BinaryCrossentropy, "BinaryCrossentropy",),
+    "loss",
+    (
+        losses_module.BinaryCrossentropy,
+        "BinaryCrossentropy",
+    ),
 )
 @pytest.mark.parametrize("n_outputs_", (1, 2))
 def test_loss_routed_params_iterable(loss, n_outputs_):
@@ -182,7 +200,11 @@ def test_loss_routed_params_iterable(loss, n_outputs_):
 
 
 @pytest.mark.parametrize(
-    "loss", (losses_module.BinaryCrossentropy, "BinaryCrossentropy",),
+    "loss",
+    (
+        losses_module.BinaryCrossentropy,
+        "BinaryCrossentropy",
+    ),
 )
 @pytest.mark.parametrize("n_outputs_", (1, 2))
 def test_loss_routed_params_dict(loss, n_outputs_):
@@ -310,7 +332,9 @@ def test_metrics_two_metric_per_output(n_outputs_):
     ]
 
     est = MultiOutputClassifier(
-        model=get_model, loss="binary_crossentropy", metrics=metrics_,
+        model=get_model,
+        loss="binary_crossentropy",
+        metrics=metrics_,
     )
     est.fit(X, y)
     if n_outputs_ == 1:
@@ -330,7 +354,9 @@ def test_metrics_two_metric_per_output(n_outputs_):
 
     # Dict of metrics
     est = MultiOutputClassifier(
-        model=get_model, loss="binary_crossentropy", metrics=metrics_,
+        model=get_model,
+        loss="binary_crossentropy",
+        metrics=metrics_,
     )
     est.fit(X, y)
     if n_outputs_ == 1:
@@ -441,7 +467,11 @@ def test_metrics_invalid_string():
         "acccuracy",
     ]  # acccuracy (extra `c`) is not a metric
 
-    est = KerasClassifier(model=get_model, loss="binary_crossentropy", metrics=metrics,)
+    est = KerasClassifier(
+        model=get_model,
+        loss="binary_crossentropy",
+        metrics=metrics,
+    )
     with pytest.raises(ValueError, match="Unknown metric function"):
         est.fit(X, y)
diff --git a/tests/test_errors.py b/tests/test_errors.py
index d2f48f87..1adf808a 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -17,7 +17,10 @@ def test_X_shape_change():
     changes shape in subsequent partial fit calls.
     """
-    estimator = KerasRegressor(model=dynamic_regressor, hidden_layer_sizes=(100,),)
+    estimator = KerasRegressor(
+        model=dynamic_regressor,
+        hidden_layer_sizes=(100,),
+    )
     X = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
     y = np.array([[0, 1, 0], [1, 0, 0]])
@@ -42,8 +45,7 @@ def test_unknown_param():
 
 
 def test_not_fitted_error():
-    """Tests error when trying to use predict before fit.
-    """
+    """Tests error when trying to use predict before fit."""
     estimator = KerasClassifier(dynamic_classifier)
     X = np.random.rand(10, 20)
     with pytest.raises(NotFittedError):
@@ -55,8 +57,7 @@
 
 
 class TestInvalidBuildFn:
-    """Tests various error cases for BuildFn.
-    """
+    """Tests various error cases for BuildFn."""
 
     def test_invalid_build_fn(self):
         class Model:
@@ -83,7 +84,9 @@ def _keras_build_fn(self, hidden_layer_sizes=(100,)):
         def dummy_func():
             return None
 
-        clf = Clf(model=dummy_func,)
+        clf = Clf(
+            model=dummy_func,
+        )
         with pytest.raises(ValueError, match="cannot implement ``_keras_build_fn``"):
             clf.fit(np.array([[0], [1]]), np.array([0, 1]))
@@ -95,7 +98,8 @@ def test_sample_weights_all_zero():
     """
     # build estimator
     estimator = KerasClassifier(
-        model=dynamic_classifier, model__hidden_layer_sizes=(100,),
+        model=dynamic_classifier,
+        model__hidden_layer_sizes=(100,),
     )
 
     # we create 20 points
@@ -166,7 +170,12 @@ def get_model(compile, meta, compile_kwargs):
             model.compile(**compile_kwargs)
         return model
 
-    est = KerasRegressor(model=get_model, loss="mse", compile=compile, optimizer=None,)
+    est = KerasRegressor(
+        model=get_model,
+        loss="mse",
+        compile=compile,
+        optimizer=None,
+    )
     with pytest.raises(
         ValueError, match="Could not interpret optimizer identifier"  # Keras error
     ):
@@ -181,7 +190,8 @@ def test_target_dtype_changes_incremental_fit():
     est.fit(X, y)
     est.partial_fit(X, y.astype(np.uint8))
     with pytest.raises(
-        ValueError, match="Got y with dtype",
+        ValueError,
+        match="Got y with dtype",
     ):
         est.partial_fit(X, y.astype(np.float64))
@@ -194,7 +204,8 @@ def test_target_dims_changes_incremental_fit():
     est.fit(X, y)
     y_new = y.reshape(-1, 1)
     with pytest.raises(
-        ValueError, match="y has 2 dimensions, but this ",
+        ValueError,
+        match="y has 2 dimensions, but this ",
     ):
         est.partial_fit(X, y_new)
@@ -216,7 +227,8 @@ def test_target_shape_changes_incremental_fit_reg():
     est = KerasRegressor(model=dynamic_regressor, hidden_layer_sizes=(100,))
     est.fit(X, y)
     with pytest.raises(
-        ValueError, match="Detected ``y`` to have ",
+        ValueError,
+        match="Detected ``y`` to have ",
    ):
         est.partial_fit(X, np.column_stack([y, y]))
@@ -229,7 +241,8 @@ def test_X_dtype_changes_incremental_fit():
     est.fit(X, y)
     est.partial_fit(X.astype(np.uint8), y)
     with pytest.raises(
-        ValueError, match="Got X with dtype",
+        ValueError,
+        match="Got X with dtype",
     ):
         est.partial_fit(X.astype(np.float64), y)
@@ -242,7 +255,8 @@ def test_target_classes_change_incremental_fit():
     est.fit(X, y)
     est.partial_fit(X.astype(np.uint8), y)
     with pytest.raises(
-        ValueError, match="Found unknown categories",
+        ValueError,
+        match="Found unknown categories",
     ):
         y[0] = 10
         est.partial_fit(X, y)
diff --git a/tests/test_input_outputs.py b/tests/test_input_outputs.py
index 0ce3422b..af6499c2 100644
--- a/tests/test_input_outputs.py
+++ b/tests/test_input_outputs.py
@@ -25,11 +25,12 @@
 
 
 class FunctionalAPIMultiInputClassifier(KerasClassifier):
-    """Tests Functional API Classifier with 2 inputs.
-    """
+    """Tests Functional API Classifier with 2 inputs."""
 
     def _keras_build_fn(
-        self, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+        self,
+        meta: Dict[str, Any],
+        compile_kwargs: Dict[str, Any],
     ) -> Model:
         # get params
         n_classes_ = meta["n_classes_"]
@@ -52,12 +53,13 @@ def _keras_build_fn(
 
     @property
     def feature_encoder(self):
-        return FunctionTransformer(func=lambda X: [X[:, 0], X[:, 1:4]],)
+        return FunctionTransformer(
+            func=lambda X: [X[:, 0], X[:, 1:4]],
+        )
 
 
 def test_multi_input():
-    """Tests custom multi-input Keras model.
-    """
+    """Tests custom multi-input Keras model."""
     clf = FunctionalAPIMultiInputClassifier()
     X = np.random.uniform(size=(10, 4))
     y = np.arange(0, 10, 1, int)
@@ -135,7 +137,10 @@ def test_KerasClassifier_transformers_can_be_reused(y, y_type, loss):
         return  # not compatible, see test_KerasClassifier_loss_invariance
     X1, y1 = np.array([[1, 2, 3]]).T, np.array([1, 2, 3])
     clf = KerasClassifier(
-        model=dynamic_classifier, hidden_layer_sizes=(100,), loss=loss, random_state=0,
+        model=dynamic_classifier,
+        hidden_layer_sizes=(100,),
+        loss=loss,
+        random_state=0,
     )
     clf.fit(X1, y1)
     tfs = clf.target_encoder_
@@ -147,14 +152,16 @@ def test_KerasClassifier_transformers_can_be_reused(y, y_type, loss):
 
 
 def test_incompatible_output_dimensions():
-    """Compares to the scikit-learn RandomForestRegressor classifier.
-    """
+    """Compares to the scikit-learn RandomForestRegressor classifier."""
     # create dataset with 4 outputs
     X = np.random.rand(10, 20)
     y = np.random.randint(low=0, high=3, size=(10,))
 
     # create a model with 2 outputs
-    def build_fn_clf(meta: Dict[str, Any], compile_kwargs: Dict[str, Any],) -> Model:
+    def build_fn_clf(
+        meta: Dict[str, Any],
+        compile_kwargs: Dict[str, Any],
+    ) -> Model:
         # get params
         n_features_in_ = meta["n_features_in_"]
diff --git a/tests/test_param_routing.py b/tests/test_param_routing.py
index e1bf8762..18fd0452 100644
--- a/tests/test_param_routing.py
+++ b/tests/test_param_routing.py
@@ -43,8 +43,16 @@
 @pytest.mark.parametrize(
     "wrapper, builder, expected_meta",
     [
-        (KerasClassifier, dynamic_classifier, keras_classifier_base_meta_set,),
-        (KerasRegressor, dynamic_regressor, keras_regressor_base_meta_set,),
+        (
+            KerasClassifier,
+            dynamic_classifier,
+            keras_classifier_base_meta_set,
+        ),
+        (
+            KerasRegressor,
+            dynamic_regressor,
+            keras_regressor_base_meta_set,
+        ),
     ],
 )
 def test_routing_basic(wrapper, builder, expected_meta):
@@ -76,8 +84,16 @@ def build_fn(hidden_layer_sizes, foo, compile_kwargs, params, meta):
 @pytest.mark.parametrize(
     "wrapper, builder, expected_meta",
     [
-        (KerasClassifier, dynamic_classifier, keras_classifier_base_meta_set,),
-        (KerasRegressor, dynamic_regressor, keras_regressor_base_meta_set,),
+        (
+            KerasClassifier,
+            dynamic_classifier,
+            keras_classifier_base_meta_set,
+        ),
+        (
+            KerasRegressor,
+            dynamic_regressor,
+            keras_regressor_base_meta_set,
+        ),
     ],
 )
 def test_routing_kwargs(wrapper, builder, expected_meta):
diff --git a/tests/test_parameters.py b/tests/test_parameters.py
index 041ef727..643e54b9 100644
--- a/tests/test_parameters.py
+++ b/tests/test_parameters.py
@@ -19,12 +19,16 @@ class TestRandomState:
     @pytest.mark.parametrize(
-        "random_state", [0, 123, np.random.RandomState(0)],
+        "random_state",
+        [0, 123, np.random.RandomState(0)],
     )
     @pytest.mark.parametrize(
         "estimator",
         [
-            KerasRegressor(model=dynamic_regressor, model__hidden_layer_sizes=(100,),),
+            KerasRegressor(
+                model=dynamic_regressor,
+                model__hidden_layer_sizes=(100,),
+            ),
             KerasClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(100,)),
         ],
     )
     def test_random_states(self, random_state, estimator):
@@ -56,7 +60,10 @@ def test_random_states(self, random_state, estimator):
     @pytest.mark.parametrize(
         "estimator",
         [
-            KerasRegressor(model=dynamic_regressor, model__hidden_layer_sizes=(100,),),
+            KerasRegressor(
+                model=dynamic_regressor,
+                model__hidden_layer_sizes=(100,),
+            ),
             KerasClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(100,)),
         ],
     )
@@ -214,7 +221,14 @@ def test_metrics(self, metric):
         assert len(est.history_[metric]) == 1
 
 
-@pytest.mark.parametrize("class_weight", ("balanced", {0: 0.5, 1: 0.5}, {0: 1, 1: 1},))
+@pytest.mark.parametrize(
+    "class_weight",
+    (
+        "balanced",
+        {0: 0.5, 1: 0.5},
+        {0: 1, 1: 1},
+    ),
+)
 def test_class_weight_balanced(class_weight):
     """KerasClassifier should accept the class_weight parameter in the same format
     as ScikitLearn. Passing "balanced" will automatically compute class_weight.
diff --git a/tests/test_scikit_learn_checks.py b/tests/test_scikit_learn_checks.py
index 44cd66b3..f69d2521 100644
--- a/tests/test_scikit_learn_checks.py
+++ b/tests/test_scikit_learn_checks.py
@@ -88,8 +88,7 @@ def test_fully_compliant_estimators_low_precision(estimator, check):
     ],
 )
 def test_fully_compliant_estimators_high_precision(estimator, check):
-    """Checks that require higher training epochs.
-    """
+    """Checks that require higher training epochs."""
     check_name = check.func.__name__
     if check_name not in higher_precision:
         pytest.skip(
@@ -101,7 +100,11 @@
 
 class SubclassedClassifier(KerasClassifier):
     def __init__(
-        self, model__hidden_layer_sizes=(100,), metrics=None, loss=None, **kwargs,
+        self,
+        model__hidden_layer_sizes=(100,),
+        metrics=None,
+        loss=None,
+        **kwargs,
     ):
         super().__init__(**kwargs)
         self.model__hidden_layer_sizes = model__hidden_layer_sizes
@@ -110,7 +113,10 @@ def __init__(
         self.optimizer = "sgd"
 
     def _keras_build_fn(
-        self, hidden_layer_sizes, meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+        self,
+        hidden_layer_sizes,
+        meta: Dict[str, Any],
+        compile_kwargs: Dict[str, Any],
     ) -> Model:
         return dynamic_classifier(
             hidden_layer_sizes=hidden_layer_sizes,
diff --git a/tests/test_serialization.py b/tests/test_serialization.py
index 6fd01ceb..5a5d612a 100644
--- a/tests/test_serialization.py
+++ b/tests/test_serialization.py
@@ -50,10 +50,11 @@ class CustomLoss(keras.losses.MeanSquaredError):
 
 
 def test_custom_loss_function():
-    """Test that a custom loss function can be serialized.
-    """
+    """Test that a custom loss function can be serialized."""
     estimator = KerasRegressor(
-        model=dynamic_regressor, loss=CustomLoss(), model__hidden_layer_sizes=(100,),
+        model=dynamic_regressor,
+        loss=CustomLoss(),
+        model__hidden_layer_sizes=(100,),
     )
     check_pickle(estimator, fetch_california_housing)
 
@@ -62,10 +63,10 @@ def test_custom_loss_function():
 
 
 def build_fn_custom_model_registered(
-    meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
-    """Dummy custom Model subclass that is registered to be serializable.
-    """
+    """Dummy custom Model subclass that is registered to be serializable."""
 
     @keras.utils.register_keras_serializable()
     class CustomModelRegistered(Model):
@@ -84,17 +85,16 @@ class CustomModelRegistered(Model):
 
 
 def test_custom_model_registered():
-    """Test that a registered subclassed Model can be serialized.
-    """
+    """Test that a registered subclassed Model can be serialized."""
     estimator = KerasRegressor(model=build_fn_custom_model_registered)
     check_pickle(estimator, fetch_california_housing)
 
 
 def build_fn_custom_model_unregistered(
-    meta: Dict[str, Any], compile_kwargs: Dict[str, Any],
+    meta: Dict[str, Any],
+    compile_kwargs: Dict[str, Any],
 ) -> Model:
-    """Dummy custom Model subclass that is not registed to be serializable.
-    """
+    """Dummy custom Model subclass that is not registed to be serializable."""
 
     class CustomModelUnregistered(Model):
         pass
@@ -112,8 +112,7 @@ class CustomModelUnregistered(Model):
 
 
 def test_custom_model_unregistered():
-    """Test that pickling an unregistered subclassed model works.
-    """
+    """Test that pickling an unregistered subclassed model works."""
     estimator = KerasRegressor(model=build_fn_custom_model_unregistered)
     check_pickle(estimator, fetch_california_housing)
 
@@ -122,10 +121,11 @@
 
 
 def test_run_eagerly():
-    """Test that models compiled with run_eagerly can be serialized.
-    """
+    """Test that models compiled with run_eagerly can be serialized."""
     estimator = KerasRegressor(
-        model=dynamic_regressor, run_eagerly=True, model__hidden_layer_sizes=(100,),
+        model=dynamic_regressor,
+        run_eagerly=True,
+        model__hidden_layer_sizes=(100,),
     )
     check_pickle(estimator, fetch_california_housing)
 
@@ -148,7 +148,13 @@ def _reload(model, epoch=None):
 
 
 @pytest.mark.parametrize(
-    "optim", ["adam", "sgd", keras.optimizers.Adam(), keras.optimizers.SGD(),],
+    "optim",
+    [
+        "adam",
+        "sgd",
+        keras.optimizers.Adam(),
+        keras.optimizers.SGD(),
+    ],
 )
 def test_partial_fit_pickle(optim):
     """
@@ -225,24 +231,29 @@ def test_pickle_loss(metric):
 
 
 @pytest.mark.parametrize(
-    "opt_cls", [keras.optimizers.Adam, keras.optimizers.RMSprop, keras.optimizers.SGD,],
+    "opt_cls",
+    [
+        keras.optimizers.Adam,
+        keras.optimizers.RMSprop,
+        keras.optimizers.SGD,
+    ],
 )
 def test_pickle_optimizer(opt_cls):
     # Minimize a variable subject to two different
     # loss functions
     opt = opt_cls()
     var1 = tf.Variable(10.0)
-    loss1 = lambda: (var1 ** 2) / 2.0
+    loss1 = lambda: (var1**2) / 2.0
     opt.minimize(loss1, [var1]).numpy()
-    loss2 = lambda: (var1 ** 2) / 1.0
+    loss2 = lambda: (var1**2) / 1.0
     opt.minimize(loss2, [var1]).numpy()
     val_no_pickle = var1.numpy()
     # Do the same with a roundtrip pickle in the middle
     opt = opt_cls()
     var1 = tf.Variable(10.0)
-    loss1 = lambda: (var1 ** 2) / 2.0
+    loss1 = lambda: (var1**2) / 2.0
     opt.minimize(loss1, [var1]).numpy()
-    loss2 = lambda: (var1 ** 2) / 1.0
+    loss2 = lambda: (var1**2) / 1.0
     opt = pickle.loads(pickle.dumps(opt))
     opt.minimize(loss2, [var1]).numpy()
     val_pickle = var1.numpy()
@@ -251,8 +262,7 @@ def test_pickle_optimizer(opt_cls):
 
 
 def test_pickle_with_callbacks():
-    """Test that models with callbacks (which hold a refence to the Keras model itself) are picklable.
-    """
+    """Test that models with callbacks (which hold a refence to the Keras model itself) are picklable."""
     clf = KerasRegressor(
         model=get_reg, loss="mse", callbacks=[keras.callbacks.Callback()]
     )