Changed optimizer to RMSProp
vvanirudh committed Feb 7, 2017
1 parent d750a73 commit 334f38f
Showing 1 changed file with 6 additions and 5 deletions.
model.py (6 additions & 5 deletions)
@@ -48,13 +48,14 @@ def gaussian_nll(mean_values, var_values, y):
         x_at = self.adversarial_input_data
         for i in range(0, len(sizes)-2):
             x_at = tf.nn.relu(tf.add(tf.matmul(x_at, self.weights[i]), self.biases[i]))
 
         output_at = tf.add(tf.matmul(x_at, self.weights[-1]), self.biases[-1])
-        mean_at = tf.reshape(output_at[:, 0], [-1, 1])
-        raw_var_at = tf.reshape(output_at[:, 1], [-1, 1])
+
+        mean_at, raw_var_at = tf.split(1, 2, output_at)
+
         var_at = tf.log(1 + tf.exp(raw_var_at)) + 1e-6
 
-        lossfunc_vec_at = gaussian_nll(mean_at, var_at, self.target_data)
-        self.nll_at = tf.reduce_mean(lossfunc_vec_at)
+        self.nll_at = gaussian_nll(mean_at, var_at, self.target_data)
+
         tvars = tf.trainable_variables()

@@ -66,6 +67,6 @@ def gaussian_nll(mean_values, var_values, y):
 
         self.clipped_gradients, _ = tf.clip_by_global_norm(self.gradients, args.grad_clip)
 
-        optimizer = tf.train.AdamOptimizer(self.lr)
+        optimizer = tf.train.RMSPropOptimizer(self.lr)
 
         self.train_op = optimizer.apply_gradients(zip(self.clipped_gradients, tvars))
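
What the first hunk does, as a standalone sketch: the two tf.reshape column slices become a single tf.split of the two-column network output, and the tf.reduce_mean moves inside gaussian_nll so that it returns a scalar batch-mean loss. The body of gaussian_nll is not shown in this diff, so the version below is an assumed standard Gaussian negative log-likelihood; output_at is a hypothetical placeholder stand-in, and tf.split uses the pre-TF-1.0 signature (split_dim, num_split, value) that the added line implies (TF >= 1.0 switched to tf.split(value, num_or_size_splits, axis)).

import numpy as np
import tensorflow as tf  # pre-1.0 graph API assumed, to match tf.split(1, 2, ...)

def gaussian_nll(mean_values, var_values, y):
    # Assumed implementation: per-example Gaussian negative log-likelihood,
    # averaged over the batch so the caller gets a scalar, matching the new
    # self.nll_at = gaussian_nll(...) line above.
    nll = 0.5 * (tf.log(var_values)
                 + tf.square(y - mean_values) / var_values
                 + np.log(2.0 * np.pi))
    return tf.reduce_mean(nll)

output_at = tf.placeholder(tf.float32, [None, 2])  # stand-in for the final layer output

# Old formulation: slice each column, then restore the trailing unit dimension.
mean_old = tf.reshape(output_at[:, 0], [-1, 1])
raw_var_old = tf.reshape(output_at[:, 1], [-1, 1])

# New formulation: one split along dimension 1 yields both [batch, 1] tensors.
mean_at, raw_var_at = tf.split(1, 2, output_at)

# Softplus plus a small epsilon keeps the predicted variance strictly positive.
var_at = tf.log(1 + tf.exp(raw_var_at)) + 1e-6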

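And the optimizer change in the second hunk, as a self-contained clip-then-apply training step; the toy loss, learning rate, and clip norm are hypothetical stand-ins for the model's NLL, self.lr, and args.grad_clip:

import tensorflow as tf  # TF 1.x-style graph API

w = tf.Variable([1.0, 2.0])         # toy trainable variable
loss = tf.reduce_sum(tf.square(w))  # toy loss standing in for the NLL

tvars = tf.trainable_variables()
gradients = tf.gradients(loss, tvars)

# Clip by global norm, then apply the update with RMSProp (previously Adam).
clipped_gradients, _ = tf.clip_by_global_norm(gradients, 5.0)  # 5.0 stands in for args.grad_clip
optimizer = tf.train.RMSPropOptimizer(0.001)                   # 0.001 stands in for self.lr
train_op = optimizer.apply_gradients(zip(clipped_gradients, tvars))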