
REF/ENH: add parameter routing #67

Merged: 30 commits (branch param-routing), Sep 15, 2020

Commits
4e6fe8c
initial attempt at param routing
adriangb Aug 25, 2020
b355e50
add tests, fix win failure
adriangb Aug 25, 2020
289d058
fix upcast func
adriangb Aug 25, 2020
07112cb
Add check
adriangb Aug 25, 2020
a5bdd5f
fix test for tf 2.2.0
adriangb Aug 25, 2020
4bc17b4
remove unused logic branch
adriangb Aug 25, 2020
be9814f
edit docs
adriangb Aug 25, 2020
c4d63cb
remove default params util
adriangb Aug 25, 2020
34ba2d1
fix typo
adriangb Aug 25, 2020
cf42f4e
add ability to do hyperparameter tuning on unset routed params
adriangb Aug 25, 2020
8a0ff71
Make build_params actual parameters of build_fn
adriangb Aug 25, 2020
2932984
fix imports, rename parameters
adriangb Aug 27, 2020
e0750e0
remove live optimizer obj from test
adriangb Aug 27, 2020
49a565e
fix undef X
adriangb Aug 29, 2020
4bcbe5b
undo accidental change
adriangb Aug 29, 2020
bc3904a
add new tests
adriangb Aug 31, 2020
76e411c
Merge branch 'master' into param-routing
adriangb Aug 31, 2020
f464bbd
revert change
adriangb Aug 31, 2020
c2b2707
Merge branch 'master' into param-routing
adriangb Aug 31, 2020
e3e9716
add test for routed nonrouted equivalence
adriangb Sep 1, 2020
8404024
remove now redundant test
adriangb Sep 1, 2020
6e46a7f
Merge branch 'param-routing' of https://github.com/adriangb/scikeras …
adriangb Sep 1, 2020
bfa1789
Merge branch 'master' into param-routing
adriangb Sep 2, 2020
a6aa852
make passing unexpected kwargs a TypeError, clean up set unpacking, r…
adriangb Sep 2, 2020
c5d9d32
add tests for kwarg
adriangb Sep 2, 2020
eb2c5d0
force cooperative inheritance
adriangb Sep 2, 2020
7bb2995
PR comments
adriangb Sep 15, 2020
ceffe9b
more doc updates
adriangb Sep 15, 2020
3fe8e62
use model.compile(**compile_kwargs) syntax in tests
adriangb Sep 15, 2020
1a38997
make test less sensitive
adriangb Sep 15, 2020
Binary file removed .DS_Store
3 changes: 3 additions & 0 deletions .gitignore
@@ -108,3 +108,6 @@ ENV/

 # Poetry lock file
 poetry.lock
+
+# MacOS files
+.DS_Store
27 changes: 14 additions & 13 deletions README.md
@@ -66,13 +66,13 @@ The signature of the model building function will be used to dynamically determi
 from scikeras.wrappers import KerasRegressor


-def model_building_function(X, n_outputs_, hidden_layer_sizes):
+def model_building_function(meta, hidden_layer_sizes):
     """Dynamically build regressor."""
     model = Sequential()
-    model.add(Dense(X.shape[1], activation="relu", input_shape=X.shape[1:]))
+    model.add(Dense(meta["X_shape_"][1], activation="relu", input_shape=meta["X_shape_"][1:]))
     for size in hidden_layer_sizes:
         model.add(Dense(size, activation="relu"))
-    model.add(Dense(n_outputs_))
+    model.add(Dense(meta["n_outputs_"]))
     model.compile("adam", loss="mean_squared_error")
     return model

@@ -103,15 +103,15 @@ class MLPRegressor(KerasRegressor):
     def __init__(self, hidden_layer_sizes=None):
         self.hidden_layer_sizes = hidden_layer_sizes

-    def _keras_build_fn(self, X, n_outputs_, hidden_layer_sizes):
+    def _keras_build_fn(self, meta, hidden_layer_sizes):
         """Dynamically build regressor."""
         if hidden_layer_sizes is None:
             hidden_layer_sizes = (100, )
         model = Sequential()
-        model.add(Dense(X.shape[1], activation="relu", input_shape=X.shape[1:]))
+        model.add(Dense(meta["X_shape_"][1], activation="relu", input_shape=meta["X_shape_"][1:]))
         for size in hidden_layer_sizes:
             model.add(Dense(size, activation="relu"))
-        model.add(Dense(n_outputs_))
+        model.add(Dense(meta["n_outputs_"]))
         model.compile("adam", loss=KerasRegressor.r_squared)
         return model
 ```
@@ -132,7 +132,7 @@ class MLPRegressor(KerasRegressor):
         self.hidden_layer_sizes = hidden_layer_sizes
         super().__init__(**kwargs)  # this is very important!

-    def _keras_build_fn(self, X, n_outputs_, hidden_layer_sizes):
+    def _keras_build_fn(self, meta, hidden_layer_sizes):
         ...

 estimator = MLPRegressor(hidden_layer_sizes=[200], a_kwarg="saveme")
@@ -152,7 +152,7 @@ class ChildMLPRegressor(MLPRegressor):
         self.child_argument = child_argument
         super().__init__(**kwargs)  # this is very important!

-    def _keras_build_fn(self, X, n_outputs_, hidden_layer_sizes):
+    def _keras_build_fn(self, meta, hidden_layer_sizes):
         ...

 estimator = ChildMLPRegressor(child_argument="hello", a_kwarg="saveme")
@@ -218,13 +218,13 @@ class FunctionalAPIMultiOutputClassifier(KerasClassifier):
     """Functional API Classifier with 2 outputs of different type.
     """

-    def _keras_build_fn(self, X, n_classes_):
+    def _keras_build_fn(self, meta):
         inp = Input((4,))

         x1 = Dense(100)(inp)

         binary_out = Dense(1, activation="sigmoid")(x1)
-        cat_out = Dense(n_classes_[1], activation="softmax")(x1)
+        cat_out = Dense(meta["n_classes_"][1], activation="softmax")(x1)

         model = Model([inp], [binary_out, cat_out])
         losses = ["binary_crossentropy", "categorical_crossentropy"]
@@ -272,7 +272,7 @@ class FunctionalAPIMultiInputClassifier(KerasClassifier):
     """Functional API Classifier with 2 inputs.
     """

-    def _keras_build_fn(self, n_classes_):
+    def _keras_build_fn(self, meta):
         inp1 = Input((1,))
         inp2 = Input((3,))

Expand All @@ -281,7 +281,7 @@ class FunctionalAPIMultiInputClassifier(KerasClassifier):

x3 = Concatenate(axis=-1)([x1, x2])

cat_out = Dense(n_classes_, activation="softmax")(x3)
cat_out = Dense(meta["n_classes_"], activation="softmax")(x3)

model = Model([inp1, inp2], [cat_out])
losses = ["categorical_crossentropy"]
@@ -331,9 +331,10 @@ class ClassifierWithCallback(KerasClassifier):
     """

     def __init__(self, tolerance, hidden_dim=None):
-        super().__init__()
         self.callbacks = [SentinalCallback(tolerance)]
         self.hidden_dim = hidden_dim
+        super().__init__()
+

     def _keras_build_fn(self, hidden_dim):
         return build_fn_clf(hidden_dim)
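For orientation, here is a hedged sketch of how the new `meta`-based build function from the README diff above might be used end to end. The synthetic data, the `build_fn` keyword, and the `hidden_layer_sizes` value are illustrative assumptions, not taken from this diff:

```python
import numpy as np

from scikeras.wrappers import KerasRegressor

# Assumes model_building_function(meta, hidden_layer_sizes) from the
# README diff above is in scope. SciKeras inspects the build function's
# signature and passes `meta` (fit-time metadata such as X_shape_ and
# n_outputs_) along with matching constructor parameters.
reg = KerasRegressor(
    build_fn=model_building_function,  # constructor keyword is an assumption
    hidden_layer_sizes=(100,),
)

X = np.random.uniform(size=(128, 8)).astype("float32")
y = np.random.uniform(size=(128,)).astype("float32")
reg.fit(X, y)
print(reg.predict(X[:5]))
```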
1 change: 0 additions & 1 deletion pyproject.toml
@@ -48,7 +48,6 @@ docs = ["sphinx", "sphinx_rtd_theme"]

 [tool.isort]
 line_length = 79
-force_single_line = true
 atomic = true
 include_trailing_comma = true
 lines_after_imports = 2
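The practical effect of removing `force_single_line` is visible in the `scikeras/_utils.py` hunk below: isort may now merge imports from the same module onto one line. A before/after sketch of that behavior:

```python
# With force_single_line = true, isort keeps one import per line:
from sklearn.base import BaseEstimator
from sklearn.base import TransformerMixin

# With the option removed, isort combines imports from the same module:
from sklearn.base import BaseEstimator, TransformerMixin
```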
89 changes: 77 additions & 12 deletions scikeras/_utils.py
@@ -3,11 +3,12 @@
 import random
 import warnings

+from typing import Any, Callable, Dict, Iterable, List, Union
+
 import numpy as np
 import tensorflow as tf

-from sklearn.base import BaseEstimator
-from sklearn.base import TransformerMixin
+from sklearn.base import BaseEstimator, TransformerMixin
 from tensorflow.keras.layers import deserialize as deserialize_layer
 from tensorflow.keras.layers import serialize as serialize_layer
 from tensorflow.keras.metrics import deserialize as deserialize_metric
@@ -155,19 +156,83 @@ def get_metric_full_name(name: str) -> str:
     # deserialize returns the actual function, then get it's name
     # to keep a single consistent name for the metric
     if name == "loss":
-        # may be passed "loss" from thre training history
+        # may be passed "loss" from training history
         return name
     return getattr(deserialize_metric(name), "__name__")


-def _get_default_args(func):
-    signature = inspect.signature(func)
-    return {
-        k: v.default
-        for k, v in signature.parameters.items()
-        if v.default is not inspect.Parameter.empty
-    }
+def _windows_upcast_ints(
+    arr: Union[List[np.ndarray], np.ndarray]
+) -> Union[List[np.ndarray], np.ndarray]:
+    # see tensorflow/probability#886
+    def _upcast(x):
+        return x.astype("int64") if x.dtype == np.int32 else x
+
+    if isinstance(arr, np.ndarray):
+        return _upcast(arr)
+    else:
+        return [_upcast(x_) for x_ in arr]
+
+
+def route_params(
+    params: Dict[str, Any], destination: str, pass_filter: Iterable[str],
+) -> Dict[str, Any]:
+    """Route and trim parameter names.
+
+    Parameters
+    ----------
+    params : Dict[str, Any]
+        Parameters to route/filter.
+    destination : str
+        Destination to route to, ex: `build` or `compile`.
+    pass_filter : Iterable[str]
+        Only keys from `params` that are in the iterable are passed.
+        This does not affect routed parameters.
+
-def _windows_upcast_ints(x: np.ndarray) -> np.ndarray:
-    return x.astype("int64") if x.dtype == np.int32 else x
+    Returns
+    -------
+    Dict[str, Any]
+        Filtered parameters, with any routing prefixes removed.
+    """
+    res = dict()
+    for key, val in params.items():
+        if "__" in key:
+            # routed param
+            if key.startswith(destination):
+                new_key = key[len(destination + "__") :]
+                res[new_key] = val
+        else:
+            # non routed
+            if pass_filter is None or key in pass_filter:
+                res[key] = val
+    return res
+
+
+def has_param(func: Callable, param: str) -> bool:
+    """Check if `func` accepts a parameter named `param`.
+
+    Parameters
+    ----------
+    func : Callable
+        Function whose signature will be inspected.
+    param : str
+        Name of the parameter to look for.
+
+    Returns
+    -------
+    bool
+        True if `func` has a positional-or-keyword or keyword-only
+        parameter named `param`, False otherwise.
+    """
+    return any(
+        p.name == param
+        for p in inspect.signature(func).parameters.values()
+        if p.kind in (p.POSITIONAL_OR_KEYWORD, p.KEYWORD_ONLY)
+    )


+def accepts_kwargs(func: Callable) -> bool:
+    """Check if `func` accepts a variadic keyword argument (``**kwargs``)."""
+    return any(
+        True
+        for param in inspect.signature(func).parameters.values()
+        if param.kind == param.VAR_KEYWORD
+    )
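To round out the diff, a brief usage sketch of the helpers added above. The parameter names and values are invented for illustration, but the expected results follow directly from the code in this hunk:

```python
import numpy as np

# Assumes the helpers above are in scope, e.g.:
# from scikeras._utils import (
#     route_params, has_param, accepts_kwargs, _windows_upcast_ints,
# )

# Routed keys ("<destination>__<name>") are kept with the prefix stripped;
# un-prefixed keys pass through only if listed in `pass_filter`.
params = {
    "epochs": 10,                        # un-prefixed ("non routed") key
    "optimizer__learning_rate": 0.01,    # routed to `optimizer`
    "build__hidden_layer_sizes": (50,),  # routed to `build`
}
route_params(params, destination="optimizer", pass_filter=set())
# -> {"learning_rate": 0.01}
route_params(params, destination="build", pass_filter={"epochs"})
# -> {"epochs": 10, "hidden_layer_sizes": (50,)}

# Signature introspection helpers:
def build_fn(meta, hidden_layer_sizes, **kwargs):
    ...

has_param(build_fn, "meta")    # True: keyword-addressable parameter
has_param(build_fn, "epochs")  # False: not in the signature
accepts_kwargs(build_fn)       # True: signature declares **kwargs

# _windows_upcast_ints upcasts only int32 arrays (tensorflow/probability#886):
_windows_upcast_ints(np.array([1, 2], dtype="int32")).dtype  # dtype('int64')
```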