Commit 4aa6f545 authored by tgupta6

make data reader single threaded

parent d016894c
@@ -8,7 +8,7 @@ def mkdir_if_not_exists(dir_name):
 experiment_name = 'QA_explicit_dot_joint_training_pretrained_same_lr'
 #experiment_name = 'object_attribute_classifier_large_images'
 # Global output directory (all subexperiments will be saved here)
-global_output_dir = '/home/tanmay/Code/GenVQA/Exp_Results/VQA'
+global_output_dir = '/data/tanmay/GenVQA_Exp_Results'
 global_experiment_dir = os.path.join(
     global_output_dir,
...
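Both constants files follow the same pattern: a module-level config that joins global_output_dir with an experiment name and creates the resulting directory via the mkdir_if_not_exists helper named in the hunk header. A minimal sketch, assuming the helper wraps os.makedirs and that experiment_name is the second join argument (neither detail is shown in this diff):

import os

def mkdir_if_not_exists(dir_name):
    # Assumed body of the helper named in the hunk header: create
    # the directory only if it does not already exist.
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)

# Hypothetical reconstruction of the surrounding config lines:
global_output_dir = '/data/tanmay/GenVQA_Exp_Results'
experiment_name = 'QA_explicit_dot_joint_training_pretrained_same_lr'
global_experiment_dir = os.path.join(
    global_output_dir,
    experiment_name)
mkdir_if_not_exists(global_experiment_dir)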
@@ -8,7 +8,7 @@ def mkdir_if_not_exists(dir_name):
 experiment_name = 'QA_explicit_dot_pretrained_same_lr'
 #experiment_name = 'object_attribute_classifier_large_images'
 # Global output directory (all subexperiments will be saved here)
-global_output_dir = '/home/nfs/tgupta6/projects/GenVQA/Exp_Results/VQA'
+global_output_dir = '/data/tanmay/GenVQA_Exp_Results'
 global_experiment_dir = os.path.join(
     global_output_dir,
@@ -164,7 +164,7 @@ answer_obj_atr_loss_wt = 0.0
 answer_regularization_coeff = 1e-5
 answer_queue_size = 500
 answer_embedding_dim = 600
-answer_lr = 1e-3
+answer_lr = 1e-4
 answer_log_every_n_iter = 500
 answer_output_dir = os.path.join(
     global_experiment_dir,
@@ -184,7 +184,7 @@ answer_model = os.path.join(
 num_regions_with_labels = 100
 # Answer fine tune params
-answer_fine_tune_from_iter = 62500
+answer_fine_tune_from_iter = 19500
 answer_fine_tune_from = answer_model + '-' + str(answer_fine_tune_from_iter)
 # Answer eval params
...
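The fine-tune start point is encoded in the checkpoint path itself: answer_fine_tune_from appends the iteration number to the answer_model prefix, matching TensorFlow-style '<prefix>-<global_step>' checkpoint naming. A sketch, with answer_model as a hypothetical placeholder (the real value is built with os.path.join in the truncated part of this file):

import os

# Hypothetical prefix standing in for the real answer_model.
answer_model = os.path.join(
    '/data/tanmay/GenVQA_Exp_Results', 'answer_classifiers', 'model')

answer_fine_tune_from_iter = 19500
# Resolves to '.../answer_classifiers/model-19500'.
answer_fine_tune_from = answer_model + '-' + str(answer_fine_tune_from_iter)
print(answer_fine_tune_from)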
@@ -87,20 +87,22 @@ class data():
                 print 'Error in thread {}: {}'.format(
                     threading.current_thread().name, str(e))

     def get_parallel(self, samples):
         batch_list = [None]*len(samples)
         worker_ids = range(len(samples))
         workers = []
         for count, sample in enumerate(samples):
-            worker = threading.Thread(
-                target = self.get_single,
-                args = (sample, batch_list, worker_ids[count]))
-            worker.setDaemon(True)
-            worker.start()
-            workers.append(worker)
+            self.get_single(sample, batch_list, worker_ids[count])
+            # worker = threading.Thread(
+            #     target = self.get_single,
+            #     args = (sample, batch_list, worker_ids[count]))
+            # worker.setDaemon(True)
+            # worker.start()
+            # workers.append(worker)

-        for worker in workers:
-            worker.join()
+        # for worker in workers:
+        #     worker.join()

         batch_size = len(samples)
         batch = dict()
...
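For reference, a minimal self-contained sketch of the two fetch strategies this hunk toggles between. Here get_single is a hypothetical stand-in for the class method (the real one is an instance method that presumably does I/O); both strategies rely on each call writing to a disjoint slot of a pre-sized shared list, so no locking is needed:

import threading

def get_single(sample, batch_list, idx):
    # Stand-in for data().get_single: fetch/process one sample and
    # write the result into its own slot of the shared list.
    batch_list[idx] = {'sample': sample}

def get_batch_single_threaded(samples):
    # The strategy this commit switches to: fetch samples one by one.
    batch_list = [None] * len(samples)
    for idx, sample in enumerate(samples):
        get_single(sample, batch_list, idx)
    return batch_list

def get_batch_threaded(samples):
    # The strategy the commit comments out: one daemon thread per
    # sample, then join them all before assembling the batch.
    batch_list = [None] * len(samples)
    workers = []
    for idx, sample in enumerate(samples):
        worker = threading.Thread(
            target=get_single, args=(sample, batch_list, idx))
        worker.daemon = True
        worker.start()
        workers.append(worker)
    for worker in workers:
        worker.join()
    return batch_list

print(get_batch_single_threaded(['a', 'b', 'c']))

The single-threaded path trades throughput for determinism: exceptions surface directly at the call site instead of being caught and printed inside worker threads, as the error handler at the top of this hunk does.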