import tensorflow as tf


def object_loss(scores, labels):
    # Softmax cross-entropy over object class scores, averaged over the batch.
    with tf.variable_scope('object_loss'):
        loss_vector = tf.nn.softmax_cross_entropy_with_logits(
            scores,
            labels,
            name='softmax_cross_entropy_with_logits')

        loss = tf.reduce_mean(
            loss_vector,
            name='average_loss')

    return loss


def attribute_loss(scores, labels, batch_size):
    with tf.variable_scope('attribute_loss'):
        loss_matrix = tf.nn.sigmoid_cross_entropy_with_logits(
            scores,
            labels,
            name='sigmoid_cross_entropy_with_logits')

        # Weight each entry so that positive and negative labels are balanced
        # against their frequency in the batch.
        label_count = tf.reduce_sum(
            labels,
            0,
            keep_dims=True,
            name='label_count')
        batch_size = tf.constant(batch_size, dtype=tf.float32)
        w1 = (1.0 - labels) * label_count / batch_size
        w2 = labels * (batch_size - label_count) / batch_size
        w = w1 + w2
        # Assumed completion of the truncated expression: average the
        # weighted per-entry losses.
        loss = tf.reduce_mean(
            w * loss_matrix,
            name='average_loss')

    return loss


def answer_loss(scores, labels):
    with tf.variable_scope('answer_loss'):
        return margin_loss(labels, scores, 1.0)


def regularization_loss(param_list, coeff):
    # L2 weight decay summed over all parameters in param_list.
    regularizer = tf.zeros(shape=[])
    for param in param_list:
        regularizer += tf.nn.l2_loss(param)
    return coeff * regularizer

def margin_loss(y, y_pred, margin):
    # Hinge-style loss: penalize scores that come within `margin` of the
    # correct class score, averaged over all entries.
    correct_score = tf.reduce_sum(tf.mul(y, y_pred), 1,
                                  keep_dims=True, name='correct_score')
    return tf.reduce_mean(tf.maximum(0.0, y_pred + margin - correct_score))


def multilabel_margin_loss(y, y_pred, margin, num_samples):
    # Per-sample multilabel margin loss averaged over the batch; samples with
    # no positive labels contribute zero via the tf.cond below.
    y_list = tf.unpack(y, num_samples)
    y_pred_list = tf.unpack(y_pred, num_samples)
    loss = 0.0
    for i in xrange(num_samples):
        y_ = y_list[i]
        y_pred_ = y_pred_list[i]
        k = tf.reduce_sum(y_)
        loss += tf.cond(
            k > 0.5,
            lambda: multilabel_margin_loss_inner(y_,y_pred_,margin),
            lambda: tf.constant(0.0))
    loss /= float(num_samples)
    return loss


def multilabel_margin_loss_inner(y_, y_pred_, margin):
    # Split scores into positive-label and negative-label partitions and
    # penalize negatives that come within `margin` of any positive score.
    partition_ids = tf.cast(y_ > 0.5, tf.int32)
    partition = tf.dynamic_partition(y_pred_, partition_ids, 2)
    pos_labels_scores = tf.expand_dims(partition[1], 1)
    neg_labels_scores = partition[0]
    margin_violation = tf.maximum(
        0.0, neg_labels_scores + margin - pos_labels_scores)
    return tf.reduce_mean(margin_violation)


def mil_loss_prob(scores, y, type='obj', epsilon=1e-5):
    # Multiple-instance style loss on log probabilities: the label mask y is
    # applied to the per-region log probabilities, the max over regions is
    # kept per label, and the result is normalized by the number of positive
    # entries in y.
    if type == 'obj':
        log_prob = tf.nn.log_softmax(scores)
    elif type == 'atr':
        log_prob = tf.log(tf.maximum(epsilon, tf.nn.sigmoid(scores)))

    max_region_scores = tf.reduce_max(log_prob*y, 0)
    loss = -tf.reduce_sum(max_region_scores)/tf.maximum(tf.reduce_sum(y), epsilon)
    return loss


def mil_loss(scores, y, type='obj', epsilon=1e-5):
    # Variant of mil_loss_prob that uses raw scores for 'obj' and clips the
    # per-label max region score at 1.0 before averaging.
    if type == 'obj':
        log_prob = scores
    elif type == 'atr':
        log_prob = tf.log(tf.maximum(epsilon, tf.nn.sigmoid(scores)))

    max_region_scores = tf.minimum(tf.reduce_max(log_prob*y, 0)-1.0, 0.0)
    loss = -tf.reduce_mean(max_region_scores)#/tf.maximum(tf.reduce_sum(y),epsilon)
    return loss

if __name__ == '__main__':
    # Quick smoke test for attribute_loss on a toy batch of two examples.
    scores = tf.constant([[0.2, 0.3, 0.7], [0.8, 0.2, 0.9]])
    labels = tf.constant([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
    loss = attribute_loss(scores, labels, 2)
    sess = tf.InteractiveSession()
    with sess.as_default():
        print loss.eval()
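    # Illustrative sketch (not part of the original file): also exercise
    # margin_loss, regularization_loss, and multilabel_margin_loss on the
    # same toy tensors as a rough sanity check of the graph construction.
    hinge = margin_loss(labels, scores, 1.0)
    reg = regularization_loss([scores], 0.01)
    ml_hinge = multilabel_margin_loss(labels, scores, 1.0, 2)
    with sess.as_default():
        print hinge.eval()
        print reg.eval()
        print ml_hinge.eval()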