Advanced Deep Learning with Keras
# Chapter 6

# Hyperparameters and tensor shapes shared by the StackedGAN builders.
lr = 2e-4
decay = 6e-8
input_shape = (image_size, image_size, 1)
label_shape = (num_labels, )
z_dim = 50
z_shape = (z_dim, )
feature1_dim = 256
feature1_shape = (feature1_dim, )

# build discriminator 0 and Q network 0 models
inputs = Input(shape=input_shape, name='discriminator0_input')
dis0 = gan.discriminator(inputs, num_codes=z_dim)
# [1] uses Adam, but discriminator converges easily with RMSprop
optimizer = RMSprop(lr=lr, decay=decay)
# loss functions: 1) probability image is real (adversarial0 loss)
# 2) MSE z0 recon loss (Q0 network loss or entropy0 loss)
loss = ['binary_crossentropy', 'mse']
loss_weights = [1.0, 10.0]
dis0.compile(loss=loss,
             loss_weights=loss_weights,
             optimizer=optimizer,
             metrics=['accuracy'])
dis0.summary()  # image discriminator, z0 estimator

# build discriminator 1 and Q network 1 models
input_shape = (feature1_dim, )
inputs = Input(shape=input_shape, name='discriminator1_input')
dis1 = build_discriminator(inputs, z_dim=z_dim)
# loss functions: 1) probability feature1 is real (adversarial1 loss)
# 2) MSE z1 recon loss (Q1 network loss or entropy1 loss)
loss = ['binary_crossentropy', 'mse']
loss_weights = [1.0, 1.0]
dis1.compile(loss=loss,
             loss_weights=loss_weights,
             optimizer=optimizer,
             metrics=['accuracy'])
dis1.summary()  # feature1 discriminator, z1 estimator

# build generator models
feature1 = Input(shape=feature1_shape, name='feature1_input')
labels = Input(shape=label_shape, name='labels')
z1 = Input(shape=z_shape, name="z1_input")
z0 = Input(shape=z_shape, name="z0_input")
# [ 191 ]
# Disentangled Representation GANs

# Latent inputs for the two stacked generators: labels + z1 drive gen1
# (the feature generator), feature1 + z0 drive gen0 (the image generator).
latent_codes = (labels, z0, z1, feature1)
gen0, gen1 = build_generator(latent_codes, image_size)
gen0.summary()  # image generator
gen1.summary()  # feature1 generator

# build encoder models
input_shape = (image_size, image_size, 1)
inputs = Input(shape=input_shape, name='encoder_input')
enc0, enc1 = build_encoder((inputs, feature1), num_labels)
enc0.summary()  # image to feature1 encoder
enc1.summary()  # feature1 to labels encoder (classifier)
# Chain enc0 -> enc1 into a single image-to-labels classifier.
encoder = Model(inputs, enc1(enc0(inputs)))
encoder.summary()  # image to labels encoder (classifier)
data = (x_train, y_train), (x_test, y_test)
# Pretrain the classifier; its frozen halves later supply the
# conditional losses of the adversarial models.
train_encoder(encoder, data, model_name=model_name)

# build adversarial0 model =
# generator0 + discriminator0 + encoder0
optimizer = RMSprop(lr=lr*0.5, decay=decay*0.5)
# encoder0 weights frozen
enc0.trainable = False
# discriminator0 weights frozen
dis0.trainable = False
gen0_inputs = [feature1, z0]
gen0_outputs = gen0(gen0_inputs)
# dis0 yields [real/fake prob, z0 estimate]; enc0 re-encodes the fake
# image back to feature1, giving adv0 three outputs in total.
adv0_outputs = dis0(gen0_outputs) + [enc0(gen0_outputs)]
# feature1 + z0 to: prob image is real + z0 recon + feature1 recon
adv0 = Model(gen0_inputs, adv0_outputs, name="adv0")
# loss functions: 1) prob image is real (adversarial0 loss)
# 2) Q network 0 loss (entropy0 loss)
# 3) conditional0 loss
loss = ['binary_crossentropy', 'mse', 'mse']
loss_weights = [1.0, 10.0, 1.0]
adv0.compile(loss=loss,
             loss_weights=loss_weights,
             optimizer=optimizer,
             metrics=['accuracy'])
adv0.summary()

# build adversarial1 model =
# generator1 + discriminator1 + encoder1
# encoder1 weights frozen
# [ 192 ]
- Page 157 and 158: Improved GANsfor layer in discrimin
- Page 159 and 160: Improved GANsFollowing figure shows
- Page 161 and 162: Improved GANsThe preceding table sh
- Page 163 and 164: Improved GANsFollowing figure shows
- Page 165 and 166: Improved GANsEssentially, in CGAN w
- Page 167 and 168: Improved GANslayer = Dense(layer_fi
- Page 169 and 170: Improved GANsx = BatchNormalization
- Page 171 and 172: Improved GANsdiscriminator.compile(
- Page 173 and 174: Improved GANssize=batch_size)real_i
- Page 175 and 176: Improved GANsUnlike CGAN, the sampl
- Page 177 and 178: Improved GANsConclusionIn this chap
- Page 179 and 180: Disentangled Representation GANsIn
- Page 181 and 182: Disentangled Representation GANsInf
- Page 183 and 184: Disentangled Representation GANsFol
- Page 185 and 186: Disentangled Representation GANs# A
- Page 187 and 188: Disentangled Representation GANsif
- Page 189 and 190: Disentangled Representation GANsLis
- Page 191 and 192: Disentangled Representation GANsdat
- Page 193 and 194: Disentangled Representation GANsy[b
- Page 195 and 196: Disentangled Representation GANspyt
- Page 197 and 198: Disentangled Representation GANsThe
- Page 199 and 200: Disentangled Representation GANsSta
- Page 201 and 202: Disentangled Representation GANs( )
- Page 203 and 204: Disentangled Representation GANsThe
- Page 205 and 206: Disentangled Representation GANsfea
- Page 207: Disentangled Representation GANs# f
- Page 211 and 212: Disentangled Representation GANsDis
- Page 213 and 214: Disentangled Representation GANsz_d
- Page 215 and 216: Disentangled Representation GANs2.
- Page 217 and 218: Disentangled Representation GANsFig
- Page 220 and 221: Cross-Domain GANsIn computer vision
- Page 222 and 223: Chapter 7There are many more exampl
- Page 224 and 225: The CycleGAN ModelFigure 7.1.3 show
- Page 226 and 227: Chapter 7Repeat for n training step
- Page 228 and 229: Chapter 7Implementing CycleGAN usin
- Page 230 and 231: filters=16,kernel_size=3,strides=2,
- Page 232 and 233: Chapter 7kernel_size=kernel_size)e3
- Page 234 and 235: Listing 7.1.3, cyclegan-7.1.1.py sh
- Page 236 and 237: Chapter 71) Build target and source
- Page 238 and 239: Chapter 7preal_target,reco_source,r
- Page 240 and 241: size=batch_size)real_source = sourc
- Page 242 and 243: Chapter 7returndirs=dirs,show=True)
- Page 244 and 245: Chapter 7Figure 7.1.10: Color (from
- Page 246 and 247: [ 229 ]Chapter 7titles = ('MNIST pr
- Page 248 and 249: Chapter 7Figure 7.1.13: Style trans
- Page 250 and 251: Chapter 7Figure 7.1.15: The backwar
- Page 252: Chapter 7References1. Yuval Netzer
- Page 255 and 256: Variational Autoencoders (VAEs)In t
- Page 257 and 258: Variational Autoencoders (VAEs)Typi
Disentangled Representation GANs
# StackedGAN assembly (flat script).  Names such as labels, z0, z1,
# feature1, image_size, num_labels, x_train/y_train, dis0, lr, decay and
# the build_*/train_encoder helpers are defined earlier in the file,
# outside this excerpt.
latent_codes = (labels, z0, z1, feature1)
gen0, gen1 = build_generator(latent_codes, image_size)
gen0.summary() # image generator
gen1.summary() # feature1 generator
# build encoder models
input_shape = (image_size, image_size, 1)
inputs = Input(shape=input_shape, name='encoder_input')
enc0, enc1 = build_encoder((inputs, feature1), num_labels)
enc0.summary() # image to feature1 encoder
enc1.summary() # feature1 to labels encoder (classifier)
# Stack the two encoders into one image -> labels classifier.
encoder = Model(inputs, enc1(enc0(inputs)))
encoder.summary() # image to labels encoder (classifier)
data = (x_train, y_train), (x_test, y_test)
# Pretrain the encoder/classifier before adversarial training; the
# frozen enc0 below supplies adv0's conditional (reconstruction) loss.
train_encoder(encoder, data, model_name=model_name)
# build adversarial0 model =
# generator0 + discriminator0 + encoder0
# Halved learning rate/decay for the adversarial phase.
optimizer = RMSprop(lr=lr*0.5, decay=decay*0.5)
# encoder0 weights frozen
enc0.trainable = False
# discriminator0 weights frozen
dis0.trainable = False
gen0_inputs = [feature1, z0]
gen0_outputs = gen0(gen0_inputs)
# dis0 returns a list [real/fake prob, z0 estimate]; appending enc0's
# re-encoding of the fake image gives adv0 three outputs.
adv0_outputs = dis0(gen0_outputs) + [enc0(gen0_outputs)]
# feature1 + z0 to: prob image is
# real + z0 recon + feature1 recon
# NOTE(review): the book's comment said "prob feature1 is real" /
# "feature0/image recon", but dis0 is built on image input and enc0
# maps image -> feature1.
adv0 = Model(gen0_inputs, adv0_outputs, name="adv0")
# loss functions: 1) prob image is real (adversarial0 loss)
# 2) Q network 0 loss (entropy0 loss)
# 3) conditional0 loss
loss = ['binary_crossentropy', 'mse', 'mse']
loss_weights = [1.0, 10.0, 1.0]
adv0.compile(loss=loss,
loss_weights=loss_weights,
optimizer=optimizer,
metrics=['accuracy'])
adv0.summary()
# build adversarial1 model =
# generator1 + discriminator1 + encoder1
# encoder1 weights frozen
[ 192 ]