From e4728984e4ece93625d58420dca88fcd474a3516 Mon Sep 17 00:00:00 2001
From: tgupta6 <tgupta6@illinois.edu>
Date: Wed, 28 Sep 2016 21:12:27 -0500
Subject: [PATCH] global step in optimizer

---
 answer_classifier_cached_features/train.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/answer_classifier_cached_features/train.py b/answer_classifier_cached_features/train.py
index 73f0f39..e2bd786 100644
--- a/answer_classifier_cached_features/train.py
+++ b/answer_classifier_cached_features/train.py
@@ -705,7 +705,9 @@ class attach_optimizer():
             vars_to_train,
             learning_rate = self.learning_rate)
 
-        self.train_op = self.optimizer.minimize(graph.total_loss)
+        self.train_op = self.optimizer.minimize(
+            graph.total_loss,
+            self.global_step)
 
         # self.add_adam_optimizer(
         #     graph.total_loss,
@@ -854,6 +856,8 @@ class log_mgr():
                 eval_vars_dict['per_region_answer_prob'].shape)
             print np.max(eval_vars_dict['per_region_answer_prob'])
 
+            print 'Learning Rate: {}'.format(eval_vars_dict['lr'])
+
         if (iter % self.log_every_n_iter==0 or is_last) and (iter!=0):
             self.model_saver.save(
                 self.sess,
@@ -956,6 +960,7 @@ if __name__=='__main__':
         'answer_scores': graph.answer_inference.answer_score[0],
         'accuracy': graph.moving_average_accuracy,
         'total_loss': graph.total_loss,
+        'lr': optimizer.learning_rate,
         # 'question_embed_concat': graph.question_embed_concat,
         # 'answer_embed_concat': graph.answers_embed_concat,
         # 'noun_embed': graph.noun_embed['positive_nouns'],
-- 
GitLab
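
For context, a minimal sketch of the pattern this patch adopts (TF 1.x-style
tf.train API; the names base learning rate, decay steps, and the toy loss below
are illustrative, not taken from train.py). Passing a global_step variable to
minimize() makes the optimizer increment the counter once per apply step, which
is what drives step-dependent schedules such as tf.train.exponential_decay; the
patch additionally fetches the (now decaying) learning rate so it can be logged
alongside the other eval vars.

    import tensorflow as tf

    # Non-trainable counter; incremented once per optimizer step.
    global_step = tf.Variable(0, trainable=False, name='global_step')

    # Learning rate decays as global_step advances (values are illustrative).
    learning_rate = tf.train.exponential_decay(
        learning_rate=1e-3,
        global_step=global_step,
        decay_steps=1000,
        decay_rate=0.5,
        staircase=True)

    # Toy loss so the snippet is self-contained.
    w = tf.Variable(1.0)
    loss = tf.square(w - 3.0)

    # Passing global_step makes minimize() increment it on each update;
    # without it, learning_rate above would stay frozen at its initial value.
    train_op = tf.train.AdamOptimizer(learning_rate).minimize(
        loss, global_step=global_step)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for _ in range(3):
            _, lr, step = sess.run([train_op, learning_rate, global_step])
            print('step {}: lr = {}'.format(step, lr))

Fetching learning_rate in the same sess.run() as train_op mirrors what the
patch does by adding 'lr': optimizer.learning_rate to the eval-vars dict.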