Commit fed5bca0 authored by tgupta6
relevance visualization

parent b5f5838f
@@ -37,14 +37,50 @@ class AnswerInference():
                 reuse_vars = False
             else:
                 reuse_vars = True
+            num_regions = self.object_feat[j].get_shape()[0].value
+            q_feat = tf.reshape(
+                self.question_vert_concat[j],
+                [1, -1])
+            q_feat = tf.expand_dims(q_feat,0)
+            q_feat = tf.tile(
+                q_feat,
+                [num_regions, self.num_answers, 1])
+            a_feat = tf.expand_dims(
+                self.answers_vert_concat[j],
+                0)
+            a_feat = tf.tile(
+                a_feat,
+                [num_regions, 1, 1])
+            obj_atr_qa_feat = tf.concat(
+                2,
+                [self.obj_atr_qa_elementwise_prod[j], q_feat, a_feat])
+            obj_atr_qa_feat = tf.expand_dims(
+                obj_atr_qa_feat,
+                0)
+            print obj_atr_qa_feat.get_shape()
-            self.per_region_answer_scores[j] = layers.conv2d(
-                tf.expand_dims(
-                    self.obj_atr_qa_elementwise_prod[j],
-                    0),
-                1,
-                1,
-                'per_region_ans_score_conv',
-                func = None,
-                reuse_vars = reuse_vars)
+            self.per_region_answer_scores[j] = layers.conv2d(
+                obj_atr_qa_feat,
+                1,
+                2500,
+                'per_region_ans_score_conv_1',
+                func = tf.nn.relu,
+                reuse_vars = reuse_vars)
+            self.per_region_answer_scores[j] = layers.conv2d(
+                self.per_region_answer_scores[j],
+                1,
+                1,
+                'per_region_ans_score_conv_2',
+                func = None,
+                reuse_vars = reuse_vars)
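
The added block tiles the per-image question feature and the per-candidate answer features so that every (region, answer) pair gets the same question/answer context concatenated onto its elementwise-product feature, then adds a leading batch axis so the 1x1 convolutions can score each pair independently. As a shape sanity check, here is a minimal NumPy stand-in (not code from this repository); all dimension values are assumptions chosen for illustration.

import numpy as np

# Assumed sizes; the real values come from the tensors' static shapes at
# graph-construction time and are not visible in this diff.
num_regions = 22
num_answers = 18
prod_dim = q_dim = a_dim = 600

prod = np.random.rand(num_regions, num_answers, prod_dim)  # stands in for obj_atr_qa_elementwise_prod[j]
q = np.random.rand(q_dim)                                   # stands in for question_vert_concat[j]
a = np.random.rand(num_answers, a_dim)                      # stands in for answers_vert_concat[j]

# reshape -> expand_dims -> tile on the question feature:
# [q_dim] -> [1, 1, q_dim] -> [num_regions, num_answers, q_dim]
q_feat = np.tile(q.reshape(1, 1, -1), (num_regions, num_answers, 1))

# expand_dims -> tile on the answer features:
# [num_answers, a_dim] -> [1, num_answers, a_dim] -> [num_regions, num_answers, a_dim]
a_feat = np.tile(a[np.newaxis, :, :], (num_regions, 1, 1))

# concat along the feature axis (tf.concat(2, ...) in the old TF API), then add
# a leading batch axis: one "image" whose spatial grid is regions x answers.
obj_atr_qa_feat = np.concatenate([prod, q_feat, a_feat], axis=2)[np.newaxis]
print(obj_atr_qa_feat.shape)  # (1, 22, 18, 1800) with the assumed sizes

If the wrapper call layers.conv2d(input, kernel_size, out_dim, name, ...) is read as a 1x1 convolution with out_dim filters (an assumption about the project's own layers module, not confirmed by this diff), the two stacked calls act as a two-layer MLP, 2500 ReLU units followed by a single linear output, applied independently at every (region, answer) cell, replacing the old single linear scorer over the elementwise-product features alone.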
...
@@ -5,7 +5,7 @@ def mkdir_if_not_exists(dir_name):
     if not os.path.exists(dir_name):
         os.mkdir(dir_name)
-experiment_name = 'QA_classifier_wordvec_xform' #'QA_joint_pretrain_genome_split'
+experiment_name = 'QA_classifier_joint_pretrain_wordvec_xform' #'QA_joint_pretrain_genome_split'
 # Global output directory (all subexperiments will be saved here)
 global_output_dir = '/home/tanmay/Code/GenVQA/Exp_Results/VQA'

@@ -195,7 +195,7 @@ answer_obj_atr_loss_wt = 0.1
 answer_regularization_coeff = 1e-5
 answer_queue_size = 500
 answer_embedding_dim = 600
-answer_lr = 1e-4
+answer_lr = 1e-3
 answer_log_every_n_iter = 500
 answer_output_dir = os.path.join(
     global_experiment_dir,

@@ -220,7 +220,7 @@ answer_fine_tune_from = answer_model + '-' + str(answer_fine_tune_from_iter)
 # Answer eval params
 answer_eval_on = 'val'
-answer_model_to_eval = answer_model + '-42000'
+answer_model_to_eval = answer_model + '-20000'
 vqa_results_dir = os.path.join(
     answer_output_dir,
...
@@ -200,7 +200,7 @@ if __name__=='__main__':
         'mscoco_val2014_annotations_with_parsed_questions.json')
     exp_dir = '/home/tanmay/Code/GenVQA/Exp_Results/VQA/' + \
-        'QA_joint_pretrain_genome_split/'
+        'QA_classifier_wordvec_xform/'
     eval_data_json = os.path.join(
         exp_dir,

@@ -219,14 +219,4 @@ if __name__=='__main__':
         data_type)
     rel_vis.write_html()
-    # key = '5289770'
-    # keys = rel_vis.eval_data.keys()
-    # for key in keys:
-    #     question = rel_vis.anno_data[key]['question']
-    #     answer = rel_vis.anno_data[key]['multiple_choice_answer']
-    #     print 'Q: ' + question
-    #     print 'GT A: ' + answer
-    #     _, rel, ans, score = rel_vis.create_relevance_map(key,mode='pred')
-    #     print 'Pred A: ' + ans
-    #     imgplot = image_io.imshow2(rel)
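
The commented-out loop removed in the last hunk was the only place this script showed how to pull per-question predictions out of the visualizer directly. Restated below as a helper, purely as a usage sketch: rel_vis is whatever visualizer object the script constructs earlier (its construction is not part of this diff), and the attribute and method names are exactly the ones that appear in the deleted lines; the image_io.imshow2 display call is omitted because that module's import is not shown here.

def print_predictions(rel_vis, keys):
    # Mirrors the deleted debugging loop: for each evaluation key, print the
    # question, the ground-truth answer, and the answer predicted by
    # create_relevance_map(key, mode='pred'), which also returns the
    # relevance map itself and a score.
    for key in keys:
        question = rel_vis.anno_data[key]['question']
        gt_answer = rel_vis.anno_data[key]['multiple_choice_answer']
        _, rel, ans, score = rel_vis.create_relevance_map(key, mode='pred')
        print('Q: ' + question)
        print('GT A: ' + gt_answer)
        print('Pred A: ' + ans)

# Example call, matching the removed code's iteration over all eval keys:
# print_predictions(rel_vis, rel_vis.eval_data.keys())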