# nn_tuner.py
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense
from kerastuner import HyperModel
from kerastuner.tuners import Hyperband, RandomSearch, BayesianOptimization
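

# NN_Tuner wraps a fully connected classifier as a Keras Tuner HyperModel:
# build() samples the hidden-layer count, layer width, activation, learning
# rate, and loss function from the hyperparameter space handed in by the tuner.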
class NN_Tuner(HyperModel):
    def __init__(self, input_dim, output_dim):
        self.inputs = input_dim
        self.outputs = output_dim

    def build(self, hp):
        model = Sequential()
        # Input layer: width and activation are tuned hyperparameters.
        model.add(
            Dense(units=hp.Int('units', min_value=8, max_value=512, step=8, default=16),
                  input_dim=self.inputs,
                  activation=hp.Choice('dense_activation', values=['relu', 'tanh', 'sigmoid'], default='relu')))
        # Hidden layers: the tuned 'layers' value controls depth. Note that the
        # 'units' and 'dense_activation' hyperparameter names are reused here,
        # so every hidden layer shares the same sampled width and activation.
        layers = hp.Int('layers', min_value=1, max_value=5, step=1, default=2)
        for _ in range(layers):
            model.add(
                Dense(units=hp.Int('units', min_value=8, max_value=512, step=8, default=16),
                      activation=hp.Choice('dense_activation', values=['relu', 'tanh', 'sigmoid'], default='relu')))
        # Softmax output over the fixed number of classes.
        model.add(Dense(self.outputs, activation='softmax'))
        model.compile(
            optimizer=optimizers.Adam(
                hp.Float(
                    'learning_rate',
                    min_value=1e-5,
                    max_value=0.1,
                    sampling='LOG',
                    default=1e-3
                )
            ),
            loss=hp.Choice('loss',
                           values=['categorical_crossentropy',
                                   'kullback_leibler_divergence'],
                           default='categorical_crossentropy'),
            metrics=['accuracy']
        )
        return model
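

# tune() runs one of three Keras Tuner search strategies (Hyperband, random
# search, or Bayesian optimization) over the NN_Tuner search space, then
# saves and evaluates the best model found on the held-out test set.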
def tune(epochs, x_train, y_train, x_test, y_test, tuner_type, input_dim):
    hypermodel = NN_Tuner(input_dim, 3)
    if tuner_type == "hyperband":
        tuner = Hyperband(
            hypermodel,
            max_epochs=30,
            objective='val_accuracy',
            seed=1,
            executions_per_trial=2,
            directory='hyperband',
            project_name='poker-nn'
        )
    elif tuner_type == "random":
        tuner = RandomSearch(
            hypermodel,
            objective='val_accuracy',
            seed=1,
            max_trials=5000,
            executions_per_trial=2,
            directory='random_search',
            project_name='poker-nn'
        )
    elif tuner_type == "bayes":
        tuner = BayesianOptimization(
            hypermodel,
            objective='val_accuracy',
            seed=1,
            max_trials=5000,
            executions_per_trial=2,
            directory='bayesian_opt',
            project_name='poker-nn'
        )
    else:
        raise ValueError("unknown tuner_type: %r" % tuner_type)
    # tuner.search_space_summary()
    tuner.search(x_train, y_train, epochs=epochs, validation_split=0.1)
    # tuner.results_summary()
    # Persist the single best model, then score it on the test set.
    best = tuner.get_best_models(num_models=1)[0]
    best.save("best_model")
    loss, accuracy = best.evaluate(x_test, y_test, batch_size=256)
    return loss, accuracy
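

# -----------------------------------------------------------------------------
# Minimal usage sketch (not part of the original module). It assumes features
# are already a flat numeric matrix and labels are one-hot with 3 classes, to
# match the softmax output and categorical losses above; the 10-feature input
# and sample counts are illustrative only. Note that the trial budgets set in
# tune() make a full search expensive.
# -----------------------------------------------------------------------------
if __name__ == "__main__":
    import numpy as np

    rng = np.random.default_rng(1)
    x_train = rng.random((1000, 10)).astype("float32")
    y_train = np.eye(3, dtype="float32")[rng.integers(0, 3, size=1000)]  # one-hot
    x_test = rng.random((200, 10)).astype("float32")
    y_test = np.eye(3, dtype="float32")[rng.integers(0, 3, size=200)]

    loss, accuracy = tune(epochs=5,
                          x_train=x_train, y_train=y_train,
                          x_test=x_test, y_test=y_test,
                          tuner_type="hyperband", input_dim=10)
    print("Testing Loss:", loss)
    print("Testing Accuracy:", accuracy)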