Rename dnn_units to 'hidden_units'
jackguagua committed Sep 9, 2020
1 parent 07ca646 commit d1201d9
Showing 9 changed files with 49 additions and 43 deletions.
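
At the call sites, the change is only the key name inside `dnn_params`. A minimal before/after sketch, using the `ModelConfig` call shape that appears in the tests and docs below (the `from deeptables.models import deeptable` import path is assumed from those tests):

```python
from deeptables.models import deeptable  # import path as used in the tests below

# Before this commit, the DNN layer spec was keyed as 'dnn_units':
# conf = deeptable.ModelConfig(
#     nets=['dnn_nets'],
#     dnn_params={'dnn_units': ((256, 0, False), (256, 0, False)),
#                 'dnn_activation': 'relu'})

# After this commit, the same structure is keyed as 'hidden_units':
conf = deeptable.ModelConfig(
    nets=['dnn_nets'],
    dnn_params={'hidden_units': ((256, 0, False), (256, 0, False)),
                'dnn_activation': 'relu'})
```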
4 changes: 2 additions & 2 deletions deeptables/models/config.py
@@ -82,8 +82,8 @@ def __new__(cls,
                 optimizer='auto',
                 loss='auto',
                 dnn_params={
-                    'dnn_units': ((128, 0, False), (64, 0, False)),
-                    'dnn_activation': 'relu',
+                    'hidden_units': ((128, 0, False), (64, 0, False)),
+                    'activation': 'relu',
                 },
                 autoint_params={
                     'num_attention': 3,
2 changes: 1 addition & 1 deletion deeptables/models/deeptable.py
@@ -159,7 +159,7 @@ class DeepTable:
         loss: str or object, (default='auto')
-        dnn_params: dict, (default={'dnn_units': ((128, 0, False), (64, 0, False)),
+        dnn_params: dict, (default={'hidden_units': ((128, 0, False), (64, 0, False)),
                             'dnn_activation': 'relu'})
         autoint_params:dict, (default={'num_attention': 3,'num_heads': 1,
29 changes: 14 additions & 15 deletions deeptables/models/hyper_dt.py
@@ -29,14 +29,13 @@ def _on_params_ready(self):


 class DTFit(ModuleSpace):
-    def __init__(self, batch_size=128, epochs=1000, space=None, name=None, **hyperparams):
+    def __init__(self, batch_size=128, epochs=None, space=None, name=None, **hyperparams):
         if batch_size is None:
             batch_size = Choice([128, 256, 512])
         hyperparams['batch_size'] = batch_size

-        if epochs is None:
-            epochs = 1000
-        hyperparams['epochs'] = epochs
+        if epochs is not None:
+            hyperparams['epochs'] = epochs

         ModuleSpace.__init__(self, space, name, **hyperparams)
         self.space.fit_params = self
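
A consequence of the DTFit change above: epochs is no longer forced to 1000 inside the search space. When it is omitted, callers supply it per call instead, and the updated test in tests/models/hyper_dt_test.py below does exactly that. Its epochs-related lines are gathered here for clarity (hdt, df and y are the objects constructed in that test, so this excerpt is not standalone):

```python
# Excerpted from tests/models/hyper_dt_test.py after this commit:
# 'epochs' is now passed explicitly rather than defaulting to 1000 inside DTFit.
hdt.search(df, y, df, y, max_trails=3, epochs=1)
best_trial = hdt.get_best_trail()
estimator = hdt.final_train(best_trial.space_sample, df, y, epochs=1)
```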
@@ -52,11 +51,11 @@ def _on_params_ready(self):


 class DnnModule(ModuleSpace):
-    def __init__(self, dnn_units=None, reduce_factor=None, dnn_dropout=None, use_bn=None, dnn_layers=None,
+    def __init__(self, hidden_units=None, reduce_factor=None, dnn_dropout=None, use_bn=None, dnn_layers=None,
                  activation=None, space=None, name=None, **hyperparams):
-        if dnn_units is None:
-            dnn_units = Choice([100, 200, 300, 500, 800, 1000])
-        hyperparams['dnn_units'] = dnn_units
+        if hidden_units is None:
+            hidden_units = Choice([100, 200, 300, 500, 800, 1000])
+        hyperparams['hidden_units'] = hidden_units

         if reduce_factor is None:
             reduce_factor = Choice([1, 0.8, 0.5])
@@ -82,15 +81,15 @@ def __init__(self, dnn_units=None, reduce_factor=None, dnn_dropout=None, use_bn=

     def _compile(self):
         dnn_layers = self.param_values['dnn_layers']
-        dnn_units = []
+        hidden_units = []
         for i in range(0, dnn_layers):
-            dnn_units.append(
-                (int(self.param_values['dnn_units'] * 1 if i == 0 else (
-                    self.param_values['dnn_units'] * (self.param_values['reduce_factor'] ** i))),
+            hidden_units.append(
+                (int(self.param_values['hidden_units'] * 1 if i == 0 else (
+                    self.param_values['hidden_units'] * (self.param_values['reduce_factor'] ** i))),
                  self.param_values['dnn_dropout'],
                  self.param_values['use_bn']))
         dnn_params = {
-            'dnn_units': dnn_units,
+            'hidden_units': hidden_units,
             'dnn_activation': self.param_values['activation'],
         }
         self.space.DT_Module.config = self.space.DT_Module.config._replace(dnn_params=dnn_params)
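
For readability, a standalone sketch of what the renamed `_compile` loop assembles. The sampled values below (base width 300, reduce_factor 0.8, 3 layers, dropout 0.3, no batch norm) are illustrative picks from the Choice spaces above, not output from a real trial:

```python
# Re-implementation of the DnnModule._compile loop, for illustration only.
hidden_units_base = 300      # sampled 'hidden_units'
reduce_factor = 0.8          # sampled 'reduce_factor'
dnn_layers = 3               # sampled 'dnn_layers'
dnn_dropout, use_bn = 0.3, False

hidden_units = []
for i in range(dnn_layers):
    width = int(hidden_units_base * 1 if i == 0
                else hidden_units_base * (reduce_factor ** i))
    hidden_units.append((width, dnn_dropout, use_bn))

dnn_params = {'hidden_units': hidden_units, 'dnn_activation': 'relu'}
print(dnn_params)
# {'hidden_units': [(300, 0.3, False), (240, 0.3, False), (192, 0.3, False)],
#  'dnn_activation': 'relu'}
```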
@@ -225,7 +224,7 @@ def mini_dt_space():
                           apply_class_weight=Bool(),
                           earlystopping_patience=Choice([1, 3, 5])
                           )
-    dnn = DnnModule(dnn_units=Choice([100, 200]),
+    dnn = DnnModule(hidden_units=Choice([100, 200]),
                     reduce_factor=Choice([1, 0.8]),
                     dnn_dropout=Choice([0, 0.3]),
                     use_bn=Bool(),
@@ -261,7 +260,7 @@ def mini_dt_space():
     #     optimizer='auto',
     #     loss='auto',
     #     dnn_params={
-    #         'dnn_units': ((128, 0, False), (64, 0, False)),
+    #         'hidden_units': ((128, 0, False), (64, 0, False)),
     #         'dnn_activation': 'relu',
     #     },
     #     autoint_params={
2 changes: 1 addition & 1 deletion docs/source/examples.md
@@ -66,7 +66,7 @@ conf = ModelConfig(
     metrics=['RootMeanSquaredError'],
     nets=['dnn_nets'],
     dnn_params={
-        'dnn_units': ((256, 0.3, True), (256, 0.3, True)),
+        'hidden_units': ((256, 0.3, True), (256, 0.3, True)),
         'dnn_activation': 'relu',
     },
     earlystopping_patience=5,
8 changes: 7 additions & 1 deletion docs/source/model_config.md
@@ -215,6 +215,12 @@ str, (default=`'add'`)
 ### output_use_bias
 bool, (default=`True`)

+### apply_class_weight
+bool, (default='False')
+
+Whether to calculate the weight of each class automatically. This can be useful to tell the model to "pay more attention" to samples from an under-represented class.
+
+
 ### optimizer
 str(name of optimizer) or optimizer instance or 'auto', (default=`'auto'`)

@@ -259,7 +265,7 @@ dictionary
 Only usable when 'dnn_nets' or a component using 'dnn' like 'pnn_nets','dcn_nets' included in [nets].
 ```
 {
-    'dnn_units': ((128, 0, False), (64, 0, False)),
+    'hidden_units': ((128, 0, False), (64, 0, False)),
     'dnn_activation': 'relu'}
 )
 ```
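
On the `apply_class_weight` option documented above: this commit only adds the doc entry, so the exact weighting formula is not shown here. A hypothetical sketch of the usual "balanced" scheme such an option implies (an assumption, not necessarily what deeptables implements):

```python
import numpy as np

# Hypothetical "balanced" class weighting; deeptables' exact formula is not shown in this commit.
y = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1])        # imbalanced binary target
classes, counts = np.unique(y, return_counts=True)
weights = len(y) / (len(classes) * counts)           # n_samples / (n_classes * count_c)
class_weight = dict(zip(classes.tolist(), weights.tolist()))
print(class_weight)                                  # {0: 0.625, 1: 2.5}
```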
2 changes: 1 addition & 1 deletion examples/dt_regression.ipynb
@@ -98,7 +98,7 @@
     "    metrics=['RootMeanSquaredError'], \n",
     "    nets=['dnn_nets'],\n",
     "    dnn_params={\n",
-    "        'dnn_units': ((256, 0.3, True), (256, 0.3, True)),\n",
+    "        'hidden_units': ((256, 0.3, True), (256, 0.3, True)),\n",
     "        'dnn_activation': 'relu',\n",
     "    },\n",
     "    earlystopping_patience=5,\n",
19 changes: 10 additions & 9 deletions tests/models/hyper_dt_test.py
@@ -23,7 +23,7 @@ def bankdata(self):
                       reward_metric='accuracy',
                       max_trails=3,
                       dnn_params={
-                          'dnn_units': ((256, 0, False), (256, 0, False)),
+                          'hidden_units': ((256, 0, False), (256, 0, False)),
                           'dnn_activation': 'relu',
                       },
                       )
@@ -49,13 +49,14 @@ def test_default_dt_space(self):
         space = default_dt_space()
         space.random_sample()
         assert space.Module_DnnModule_1.param_values['dnn_layers'] == len(
-            space.DT_Module.config.dnn_params['dnn_units'])
-        assert space.Module_DnnModule_1.param_values['dnn_units'] == space.DT_Module.config.dnn_params['dnn_units'][0][
-            0]
+            space.DT_Module.config.dnn_params['hidden_units'])
+        assert space.Module_DnnModule_1.param_values['hidden_units'] == \
+               space.DT_Module.config.dnn_params['hidden_units'][0][
+                   0]
         assert space.Module_DnnModule_1.param_values['dnn_dropout'] == \
-               space.DT_Module.config.dnn_params['dnn_units'][0][
+               space.DT_Module.config.dnn_params['hidden_units'][0][
                    1]
-        assert space.Module_DnnModule_1.param_values['use_bn'] == space.DT_Module.config.dnn_params['dnn_units'][0][
+        assert space.Module_DnnModule_1.param_values['use_bn'] == space.DT_Module.config.dnn_params['hidden_units'][0][
             2]

     def test_hyper_dt(self):
@@ -64,7 +65,7 @@ def test_hyper_dt(self):
                       callbacks=[SummaryCallback()],
                       reward_metric='accuracy',
                       dnn_params={
-                          'dnn_units': ((256, 0, False), (256, 0, False)),
+                          'hidden_units': ((256, 0, False), (256, 0, False)),
                           'dnn_activation': 'relu',
                       },
                       cache_preprocessed_data=True,
@@ -77,11 +78,11 @@ def test_hyper_dt(self):

         y = np.random.randint(0, 2, size=(100), dtype='int')
         df = pd.DataFrame({'x1': x1, 'x2': x2, 'x3': x3, 'x4': x4})
-        hdt.search(df, y, df, y, max_trails=3, )
+        hdt.search(df, y, df, y, max_trails=3, epochs=1)
         assert hdt.best_model
         best_trial = hdt.get_best_trail()

-        estimator = hdt.final_train(best_trial.space_sample, df, y, cross_validation=True, num_folds=3)
+        estimator = hdt.final_train(best_trial.space_sample, df, y, epochs=1)
         score = estimator.predict(df)
         result = estimator.evaluate(df, y)
         assert len(score) == 100
26 changes: 13 additions & 13 deletions tests/utils/batch_trainer_test.py
@@ -16,7 +16,7 @@ class Test_Batch_Trainer:
     def test_run_binary_heart_disease_CV(self):
        data = dsutils.load_heart_disease_uci()
        conf = deeptable.ModelConfig(
-            dnn_params={'dnn_units': ((256, 0, False), (256, 0, False)),
+            dnn_params={'hidden_units': ((256, 0, False), (256, 0, False)),
                         'dnn_activation': 'relu'},
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -46,7 +46,7 @@ def test_run_binary_heart_disease_CV(self):
     def test_run_lgbm(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            dnn_params={'dnn_units': ((256, 0, False), (256, 0, False)),
+            dnn_params={'hidden_units': ((256, 0, False), (256, 0, False)),
                         'dnn_activation': 'relu'},
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -72,7 +72,7 @@ def test_run_lgbm(self):
     def test_run_catboost(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            dnn_params={'dnn_units': ((256, 0, False), (256, 0, False)),
+            dnn_params={'hidden_units': ((256, 0, False), (256, 0, False)),
                         'dnn_activation': 'relu'},
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -97,7 +97,7 @@ def test_run_catboost(self):
     def test_run_binary(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            dnn_params={'dnn_units': ((256, 0, False), (256, 0, False)),
+            dnn_params={'hidden_units': ((256, 0, False), (256, 0, False)),
                         'dnn_activation': 'relu'},
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -124,7 +124,7 @@ def test_run_regression(self):
         df_train.insert(df_train.shape[1], 'target', boston_dataset.target)

        conf = deeptable.ModelConfig(
-            dnn_params={'dnn_units': ((256, 0, False), (256, 0, False)),
+            dnn_params={'hidden_units': ((256, 0, False), (256, 0, False)),
                         'dnn_activation': 'relu'},
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -154,7 +154,7 @@ def test_run_regression(self):
     def test_run_multiclass(self):
        data = dsutils.load_glass_uci()
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -180,7 +180,7 @@ def test_run_multiclass(self):
     def test_run_cross_validation(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -241,7 +241,7 @@ def test_get_models(self):
     def test_get_models_retian_single_model(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -278,7 +278,7 @@ def test_get_models_retian_single_model(self):
     def test_ensemble_predict_proba(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -305,7 +305,7 @@ def test_probe_evaluation(self):
     def test_probe_evaluation(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -329,7 +329,7 @@ def test_zero_testset(self):
     def test_zero_testset(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -354,7 +354,7 @@ def test_zero_testset(self):
     def test_zero_testset_cross_validation(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
@@ -412,7 +412,7 @@ def test_multi_config(self):
     def test_leaderboard(self):
        data = dsutils.load_adult().head(1000)
        conf = deeptable.ModelConfig(
-            # dnn_units=((256, 0, False), (128, 0, False)),
+            # hidden_units=((256, 0, False), (128, 0, False)),
            # dnn_activation='relu',
            fixed_embedding_dim=False,
            embeddings_output_dim=0,
The ninth changed file was renamed without changes.
