Skip to content
Snippets Groups Projects
Commit 582c88c7 authored by tgupta6's avatar tgupta6
Browse files

change mil loss to use scores instead of softmax prob

parent cf15ccfa
No related branches found
No related tags found
No related merge requests found
......@@ -84,7 +84,7 @@ def multilabel_margin_loss_inner(y_,y_pred_,margin):
return tf.reduce_mean(margin_violation)
def mil_loss(scores, y, type='obj', epsilon=1e-5):
def mil_loss_prob(scores, y, type='obj', epsilon=1e-5):
if type=='obj':
log_prob = tf.nn.log_softmax(scores)
elif type=='atr':
......@@ -94,6 +94,16 @@ def mil_loss(scores, y, type='obj', epsilon=1e-5):
loss = -tf.reduce_sum(max_region_scores)/tf.maximum(tf.reduce_sum(y),epsilon)
return loss
def mil_loss(scores, y, type='obj', epsilon=1e-5):
    """Multiple-instance-learning hinge loss over region scores.

    Args:
        scores: region-by-label score tensor. For `type='obj'` the raw
            scores are used directly; for `type='atr'` they are squashed
            through a sigmoid and log-transformed.
        y: binary label tensor broadcastable against `scores`; selects
            which entries participate in the per-label max.
        type: 'obj' or 'atr' — chooses how `scores` are interpreted.
            NOTE(review): shadows the builtin `type`, but renaming would
            break keyword callers, so the name is kept.
        epsilon: floor inside the log for the 'atr' branch to avoid log(0).

    Returns:
        Scalar loss: the mean, over labels, of the margin violation
        min(max_region_score - 1, 0), negated.
    """
    if type == 'obj':
        region_log_scores = scores
    elif type == 'atr':
        # Clamp sigmoid output away from zero before taking the log.
        region_log_scores = tf.log(tf.maximum(epsilon, tf.nn.sigmoid(scores)))
    # Per-label best region score among positive labels, hinged at margin 1.
    margin_violation = tf.minimum(tf.reduce_max(region_log_scores * y, 0) - 1.0, 0.0)
    # Normalization by sum(y) was intentionally disabled upstream; keep as-is.
    return -tf.reduce_mean(margin_violation)  # /tf.maximum(tf.reduce_sum(y),epsilon)
if __name__=='__main__':
scores = tf.constant([[0.2, 0.3, 0.7],[0.8, 0.2, 0.9]])
......
0% Loading or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment