Commit 2216f3bc authored by tgupta6

allow fine_tune to begin with decayed learning rate

parent 726009a2
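
In outline, the commit threads a new start_iter argument from the fine-tuning script into attach_optimizer, where it seeds the TensorFlow global step. Because the learning rate is an exponential decay of the global step, a run resumed at iteration start_iter now begins at the already-decayed rate instead of the base rate. A minimal arithmetic sketch of the effect, assuming the continuous (non-staircase) schedule that tf.train.exponential_decay applies by default, and a hypothetical base rate of 1e-3:

    def decayed_lr(base_lr, step, decay_step=24000, decay_rate=0.5):
        # base_lr * decay_rate ** (step / decay_step) is the formula
        # tf.train.exponential_decay uses with staircase=False.
        return base_lr * decay_rate ** (float(step) / decay_step)

    print(decayed_lr(1e-3, 0))      # 0.001   (old behavior at resume)
    print(decayed_lr(1e-3, 48000))  # 0.00025 (with start_iter=48000)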
@@ -81,25 +81,27 @@ if __name__=='__main__':
         constants.answer_batch_size,
         constants.image_size,
         constants.num_negative_answers,
         constants.answer_embedding_dim,
         constants.answer_regularization_coeff,
         constants.answer_batch_size*constants.num_region_proposals,
         constants.num_regions_with_labels,
         constants.num_object_labels,
         constants.num_attribute_labels,
         constants.answer_obj_atr_loss_wt,
         constants.answer_ans_loss_wt,
         constants.answer_mil_loss_wt,
         resnet_feat_dim=constants.resnet_feat_dim,
         training=True)
     print 'Attaching optimizer...'
     optimizer = train.attach_optimizer(
         graph,
-        constants.answer_lr)
+        constants.answer_lr,
+        start_iter=constants.answer_fine_tune_from_iter)
     print 'Starting a session...'
     config = tf.ConfigProto()
     config.gpu_options.allow_growth = True
-    config.gpu_options.per_process_gpu_memory_fraction = 0.8
+    config.gpu_options.per_process_gpu_memory_fraction = 0.9
     sess = tf.Session(config=config, graph=graph.tf_graph)
     print 'Creating initializer...'
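
Besides the start_iter plumbing, this hunk raises the per-process GPU memory cap from 80% to 90%. With allow_growth also set, TensorFlow allocates GPU memory lazily up to that fraction rather than grabbing it all at startup. A minimal sketch of the same session configuration, assuming TF 1.x:

    import tensorflow as tf

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True                    # allocate lazily
    config.gpu_options.per_process_gpu_memory_fraction = 0.9  # cap at 90% of GPU RAM
    sess = tf.Session(config=config)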
@@ -124,6 +126,7 @@ if __name__=='__main__':
         'answer_scores': graph.answer_inference.answer_score[0],
         'accuracy': graph.moving_average_accuracy,
         'total_loss': graph.total_loss,
+        'lr': optimizer.learning_rate,
         # 'question_embed_concat': graph.question_embed_concat,
         # 'answer_embed_concat': graph.answers_embed_concat,
         # 'noun_embed': graph.noun_embed['positive_nouns'],
@@ -385,9 +385,10 @@ class graph_creator():
         if self.training:
             self.object_loss = self.obj_atr_loss_wt*losses.multilabel_margin_loss(
-                self.object_scores_with_labels,
                 self.plh['object_labels'],
-                1.0)
+                self.object_scores_with_labels,
+                1.0,
+                self.num_regions_w_labels)
             # self.object_loss = self.obj_atr_loss_wt*losses.object_loss(
             #     #self.obj_atr_inference.object_scores,
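
The loss call now takes the label placeholder first, the scores second, and the number of labeled regions as an extra argument after the margin. losses.multilabel_margin_loss itself is not part of this diff; the numpy sketch below is only an assumption of one common formulation consistent with that signature, not the repository's implementation:

    import numpy as np

    def multilabel_margin_loss(labels, scores, margin, num_regions):
        # labels: (num_regions, num_classes) binary indicator matrix
        # scores: (num_regions, num_classes) real-valued class scores
        # Hinge on the worst (positive, negative) class pair per region,
        # averaged over regions. Illustrative only.
        pos = np.where(labels > 0, scores, np.inf)   # scores of positive classes
        neg = np.where(labels > 0, -np.inf, scores)  # scores of negative classes
        violation = margin - pos.min(axis=1) + neg.max(axis=1)
        return np.maximum(0.0, violation).sum() / float(num_regions)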
@@ -669,7 +670,7 @@ def create_batch_generator():
 class attach_optimizer():
-    def __init__(self, graph, lr, decay_step=24000, decay_rate=0.5):
+    def __init__(self, graph, lr, decay_step=24000, decay_rate=0.5, start_iter=0):
         self.graph = graph
         self.lr = lr
         self.decay_step = decay_step
@@ -691,7 +692,7 @@ class attach_optimizer():
         all_vars = tf.all_variables()
         self.ops = dict()
-        self.global_step = tf.Variable(0, trainable=False)
+        self.global_step = tf.Variable(start_iter, trainable=False)
         self.learning_rate = tf.train.exponential_decay(
             self.lr,
             self.global_step,
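
Seeding tf.Variable with start_iter is what makes the schedule resume mid-decay: after variable initialization the global step already reads start_iter, so the very first evaluation of the learning-rate tensor returns the decayed value. A self-contained TF 1.x sketch (names and values are illustrative; the decay arguments follow the defaults in the diff above):

    import tensorflow as tf

    start_iter = 48000  # e.g. constants.answer_fine_tune_from_iter
    global_step = tf.Variable(start_iter, trainable=False)
    learning_rate = tf.train.exponential_decay(
        1e-3, global_step, 24000, 0.5)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # The same tensor the training loop now fetches as 'lr'.
        print(sess.run(learning_rate))  # 0.00025, not 0.001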