-
Notifications
You must be signed in to change notification settings - Fork 46
/
model.py
64 lines (52 loc) · 2.44 KB
/
model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import numpy as np
import keras
from keras.layers import Reshape,LeakyReLU , Conv2D , Dense ,Input , Lambda , Conv2DTranspose , Flatten , Dropout , Activation
from keras.models import Model
from keras.datasets.mnist import load_data
from keras import backend
from keras.optimizers import Adam
from matplotlib import pyplot
def define_generator(latent_dim, alpha_):
    """Build the SGAN generator: maps a latent vector to a 28x28x1 image.

    Architecture: Dense projection to a 7x7x128 feature map, then two
    stride-2 transposed convolutions (7x7 -> 14x14 -> 28x28), finished by
    a single-channel 7x7 convolution with tanh so outputs lie in [-1, 1].

    Args:
        latent_dim: size of the input noise vector.
        alpha_: negative slope for every LeakyReLU activation.

    Returns:
        Uncompiled keras Model (trained only through the combined GAN).
    """
    input_ = Input(shape=(latent_dim,))
    # Project and reshape the latent vector into a low-resolution feature map.
    nodes = 7 * 7 * 128
    generator = Dense(nodes)(input_)
    generator = LeakyReLU(alpha=alpha_)(generator)
    generator = Reshape((7, 7, 128))(generator)
    # Upsample 7x7 -> 14x14.
    generator = Conv2DTranspose(128, kernel_size=(4, 4), strides=(2, 2), padding='same')(generator)
    generator = LeakyReLU(alpha=alpha_)(generator)
    # Upsample 14x14 -> 28x28.
    generator = Conv2DTranspose(128, kernel_size=(4, 4), strides=(2, 2), padding='same')(generator)
    generator = LeakyReLU(alpha=alpha_)(generator)
    # tanh output matches MNIST images rescaled to [-1, 1].
    final_layer = Conv2D(1, (7, 7), activation='tanh', padding='same')(generator)
    # Fix: removed leftover debug print(input_.shape) that wrote to stdout
    # on every model construction.
    model = Model(input_, final_layer)
    return model
def custom_activation(output):
    """Collapse unnormalised class logits into one real/fake probability.

    Implements D(x) = Z / (Z + 1) with Z = sum_k exp(logit_k), the
    unsupervised output used by the semi-supervised GAN discriminator:
    large logits for any class push the score toward 1 ("real").
    """
    exp_sum = backend.sum(backend.exp(output), axis=-1, keepdims=True)
    return exp_sum / (exp_sum + 1.0)
def define_discriminator(alpha_, dropout_, lr_, beta_1_, input_shape=(28, 28, 1), num_classes=10):
    """Build the two-headed SGAN discriminator over a shared conv trunk.

    Both returned models share weights: the supervised head classifies the
    digit (softmax over ``num_classes``), while the unsupervised head folds
    the same logits into a single real/fake probability via
    ``custom_activation``.

    Args:
        alpha_: LeakyReLU negative slope.
        dropout_: dropout rate before the logits layer.
        lr_: Adam learning rate for both heads.
        beta_1_: Adam beta_1 for both heads.
        input_shape: image shape, default MNIST (28, 28, 1).
        num_classes: number of supervised class labels.

    Returns:
        (supervised_model, unsupervised_model) tuple, both compiled.
    """
    input_img = Input(shape=input_shape)
    features = input_img
    # Three identical stride-2 conv blocks downsample 28x28 -> 14x14 -> 7x7 -> 4x4.
    for _ in range(3):
        features = Conv2D(128, (3, 3), strides=(2, 2), padding='same')(features)
        features = LeakyReLU(alpha=alpha_)(features)
    features = Flatten()(features)
    features = Dropout(dropout_)(features)
    logits = Dense(num_classes)(features)
    # Supervised head: per-class probabilities, trained on labelled digits.
    s_out_layer = Activation('softmax')(logits)
    s_model = Model(input_img, s_out_layer)
    s_model.compile(loss='sparse_categorical_crossentropy', optimizer=Adam(lr=lr_, beta_1=beta_1_), metrics=['accuracy'])
    # Unsupervised head: single real/fake score derived from the same logits.
    us_out_layer = Lambda(custom_activation)(logits)
    us_model = Model(input_img, us_out_layer)
    us_model.compile(loss='binary_crossentropy', optimizer=Adam(lr=lr_, beta_1=beta_1_))
    return s_model, us_model
def define_gan(g_model, dis_model, lr_, beta_1_):
    """Chain the generator into the (frozen) unsupervised discriminator.

    The combined model trains only the generator: the discriminator's
    weights are frozen here so gradient updates through this model leave
    it untouched.

    Args:
        g_model: the generator model.
        dis_model: the unsupervised discriminator model.
        lr_: Adam learning rate.
        beta_1_: Adam beta_1.

    Returns:
        Compiled keras Model mapping latent vectors to real/fake scores.
    """
    # Freeze discriminator weights for generator-only updates.
    dis_model.trainable = False
    stacked_output = dis_model(g_model.output)
    combined = Model(g_model.input, stacked_output)
    combined.compile(loss='binary_crossentropy', optimizer=Adam(lr=lr_, beta_1=beta_1_))
    return combined