Commit b6506c90 authored by tgupta6
word vector transformation works for object attribute classification

parent dc2d44a6
@@ -5,7 +5,7 @@ def mkdir_if_not_exists(dir_name):
     if not os.path.exists(dir_name):
         os.mkdir(dir_name)
-experiment_name = 'object_attribute_classifier_large_images_vqa_split' #'QA_joint_pretrain_genome_split'
+experiment_name = 'object_attribute_classifier_wordvec_xform' #'QA_joint_pretrain_genome_split'
 # Global output directory (all subexperiments will be saved here)
 global_output_dir = '/home/tanmay/Code/GenVQA/Exp_Results/VQA'
@@ -117,7 +117,7 @@ region_model = os.path.join(
     'model')
 # Object Attribute Finetuning Params
-region_fine_tune_from_iter = 34000
+region_fine_tune_from_iter = 50500
 region_fine_tune_from = region_model + '-' + str(region_fine_tune_from_iter)
 # Object Attribute Model Selection
@@ -129,7 +129,7 @@ region_model_accuracies_txt = os.path.join(
 # Object Attribute Classifier Evaluation Params
 region_eval_on = 'train_held_out' # One of {'test','train_held_out','train_subset'}
-region_model_to_eval = region_model + '-' + '80000'
+region_model_to_eval = region_model + '-' + '102000'
 region_attribute_scores_dirname = os.path.join(
     region_output_dir,
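Both iteration bumps above (34000 to 50500 for fine-tuning, 80000 to 102000 for evaluation) point the pipeline at later checkpoints of the retrained model. The config builds these paths as region_model + '-' + str(iteration), which matches TensorFlow's step-suffixed checkpoint names. A minimal sketch of restoring such a checkpoint, assuming a placeholder path and a stand-in variable rather than the repository's actual graph:

import tensorflow as tf

# Assumed path; the real value comes from the config's region_model setting.
region_model = '/path/to/object_attribute_classifier/model'
region_fine_tune_from = region_model + '-' + str(50500)

graph = tf.Graph()
with graph.as_default():
    # Stand-in variable; the real graph is the object-attribute classifier.
    dummy = tf.get_variable('dummy', shape=[1])
    saver = tf.train.Saver()

with tf.Session(graph=graph) as sess:
    # Loads the parameter values saved at iteration 50500.
    saver.restore(sess, region_fine_tune_from)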
@@ -29,16 +29,37 @@ class word_vector_manager():
         with tf.variable_scope('attribute_label_word_vectors'):
             self.attribute_label_word_vectors()
+    def word_vec_transformation(self, word_vecs):
+        with tf.variable_scope('word_vec_transform'):
+            word_vecs_fc1 = layers.full(
+                word_vecs,
+                constants.word_vector_size,
+                'word_vec_fc1')
+            word_vecs_fc2 = layers.full(
+                word_vecs_fc1,
+                constants.word_vector_size,
+                'word_vec_fc2',
+                func = None)
+        return word_vecs_fc2
     def init_word_vector_tensor(self):
         # with tf.device('/cpu:0'):
-        self.word_vectors = tf.get_variable(
-            name = 'word_vector',
-            shape = [self.vocab_size, constants.word_vector_size],
-            initializer = tf.constant_initializer(self.vocab_word_vectors))
+        # self.word_vectors_ = tf.get_variable(
+        #     name = 'word_vector',
+        #     shape = [self.vocab_size, constants.word_vector_size],
+        #     initializer = tf.constant_initializer(self.vocab_word_vectors))
+        self.pretrained_word_vectors = tf.constant(
+            self.vocab_word_vectors,
+            name = 'pretrained_word_vectors'
+        )
+        self.word_vectors = self.word_vec_transformation(
+            self.pretrained_word_vectors)
-        tf.add_to_collection('to_regularize', self.word_vectors)
+        #tf.add_to_collection('to_regularize', self.word_vectors)
     def read_object_labels(self):
         with open(constants.object_labels_json, 'r') as file:
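The hunk above is the heart of the commit: the word-vector matrix is no longer a trainable tf.get_variable initialised from the pretrained vectors. Instead, the pretrained vectors are frozen in a tf.constant and pushed through a learned two-layer transform under the 'word_vec_transform' scope, with the second layer kept linear via func = None, and the result is no longer added to the 'to_regularize' collection. layers.full is the project's fully connected helper; the sketch below approximates the same pattern in plain TensorFlow 1.x, assuming layers.full computes xW + b with an optional nonlinearity (ReLU by default is an assumption), and using a random 300-dimensional placeholder in place of constants.word_vector_size and the loaded vocabulary vectors:

import numpy as np
import tensorflow as tf

def fully_connected(x, out_dim, name, func=tf.nn.relu):
    # Assumed behaviour of the project's layers.full helper: xW + b,
    # followed by an optional nonlinearity (ReLU assumed as the default).
    in_dim = x.get_shape().as_list()[-1]
    with tf.variable_scope(name):
        W = tf.get_variable('W', shape=[in_dim, out_dim])
        b = tf.get_variable('b', shape=[out_dim],
                            initializer=tf.zeros_initializer())
        y = tf.matmul(x, W) + b
        return func(y) if func is not None else y

# Placeholder pretrained matrix; frozen, so it never receives gradients.
vocab_word_vectors = np.random.rand(1000, 300).astype(np.float32)
pretrained = tf.constant(vocab_word_vectors, name='pretrained_word_vectors')

with tf.variable_scope('word_vec_transform'):
    hidden = fully_connected(pretrained, 300, 'word_vec_fc1')               # nonlinear
    word_vectors = fully_connected(hidden, 300, 'word_vec_fc2', func=None)  # linear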
@@ -88,6 +109,7 @@
         self.object_label_vectors = tf.concat(
             0, object_label_vector_list)
+
     def attribute_label_word_vectors(self):
         inv_attribute_labels = {v: k for k, v in self.attribute_labels.items()}
         num_attribute_labels = len(inv_attribute_labels)
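This hunk is context around the label-vector construction: one word vector per object/attribute label, stacked with the pre-1.0 tf.concat(concat_dim, values) signature (dimension argument first). Assuming each label has already been mapped to a row index of the transformed word-vector matrix, the same stacked matrix can be written with a single tf.gather; the indices and shapes below are purely illustrative:

import tensorflow as tf

# Hypothetical label-to-vocabulary-index mapping; in the repository these
# indices come from the object/attribute label JSON files.
attribute_label_indices = tf.constant([12, 57, 873], dtype=tf.int32)

# Transformed word-vector matrix, [vocab_size, word_vector_size]; the shape
# here is a placeholder.
word_vectors = tf.get_variable('word_vectors_demo', shape=[1000, 300])

# One row per label, stacked along dimension 0 -- the same result that
# tf.concat(0, attribute_label_vector_list) assembles piecewise.
attribute_label_vectors = tf.gather(word_vectors, attribute_label_indices)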
@@ -101,6 +123,9 @@
         self.attribute_label_vectors = tf.concat(
             0, attribute_label_vector_list)
+
+def get_trainable_variables():
+    return var_collect.collect_scope('word_vec_transform')
 if __name__=='__main__':
     word_vector_mgr = word_vector_manager()
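The new get_trainable_variables helper exposes only the variables created under the 'word_vec_transform' scope, via the project's var_collect utility, so a training script can fine-tune the transform while the frozen pretrained constant and the rest of the graph are left alone. A minimal sketch of how such a variable list is typically consumed, using tf.get_collection as a stand-in for var_collect.collect_scope and a toy loss:

import tensorflow as tf

with tf.variable_scope('word_vec_transform'):
    # Placeholder parameter standing in for the fc1/fc2 weights.
    W = tf.get_variable('demo_W', shape=[300, 300])

# Same idea as var_collect.collect_scope('word_vec_transform').
transform_vars = tf.get_collection(
    tf.GraphKeys.TRAINABLE_VARIABLES, scope='word_vec_transform')

# Toy loss; in the repository this would be the object/attribute
# classification loss.
loss = tf.reduce_sum(tf.square(W))

# Only the 'word_vec_transform' variables receive gradient updates.
train_op = tf.train.AdamOptimizer(learning_rate=1e-3).minimize(
    loss, var_list=transform_vars)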