Model optimizer

Now, let's define the model optimizer, which is very similar to the ones we defined before:

def model_optimizer(disc_loss, gen_loss, learning_rate, beta1):
    """Build the training ops for a GAN.

    Splits the trainable variables into the discriminator's and the
    generator's (by variable-scope name prefix) and creates one Adam
    optimizer per network, so each loss only updates its own weights.

    Args:
        disc_loss: discriminator loss tensor.
        gen_loss: generator loss tensor.
        learning_rate: learning rate; must be a `tf.Variable` so it can
            be decayed in place via `tf.assign`.
        beta1: exponential-decay rate for Adam's first-moment estimates.

    Returns:
        Tuple of (discriminator train op, generator train op,
        learning-rate decay op).
    """
    # Partition all trainable variables by the scope they were created in.
    all_vars = tf.trainable_variables()
    d_vars = [v for v in all_vars if v.name.startswith('discriminator')]
    g_vars = [v for v in all_vars if v.name.startswith('generator')]

    # Sanity check: every trainable variable must belong to exactly one
    # of the two networks, otherwise it would never be updated.
    for v in all_vars:
        assert v in d_vars or v in g_vars

    # One optimizer per network, each restricted to its own variables,
    # so the two losses can be minimized simultaneously.
    d_train_op = tf.train.AdamOptimizer(
        learning_rate, beta1=beta1).minimize(disc_loss, var_list=d_vars)
    g_train_op = tf.train.AdamOptimizer(
        learning_rate, beta1=beta1).minimize(gen_loss, var_list=g_vars)

    # Op that multiplies the learning-rate variable by 0.9 when run.
    decay_lr_op = tf.assign(learning_rate, learning_rate * 0.9)

    return d_train_op, g_train_op, decay_lr_op
..................Content has been hidden....................

You can't read all pages of the ebook; please click here to log in to view all pages.
Reset