Python lasagne.objectives.binary_crossentropy() Examples

The following are 5 code examples of lasagne.objectives.binary_crossentropy(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module lasagne.objectives, or try the search function.
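The function takes sigmoid network outputs and same-shaped binary targets and returns the elementwise crossentropy, leaving aggregation to the caller. A minimal sketch of the call pattern (the variable names here are illustrative):

import theano.tensor as T
from lasagne.objectives import binary_crossentropy

predictions = T.matrix('predictions')  # sigmoid outputs in (0, 1)
targets = T.matrix('targets')          # binary labels of the same shape
loss = binary_crossentropy(predictions, targets).mean()  # aggregate to a scalar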
Example #1
Source File: AED_train.py    From AcousticEventDetection with MIT License
import theano.tensor as T
from lasagne import objectives

def calc_loss_multi(prediction, targets):

    # clip predictions away from exactly 0 and 1, where the log-loss diverges
    prediction = T.clip(prediction, 0.0000001, 0.9999999)

    # binary crossentropy is the right choice for a multi-label sigmoid output
    loss = T.mean(objectives.binary_crossentropy(prediction, targets))

    return loss

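A sketch of how such a loss function is typically wired into a Theano training step; the network below is a placeholder for illustration, not part of the original project:

import theano
import theano.tensor as T
import lasagne

input_var = T.tensor4('inputs')
targets = T.matrix('targets')

# placeholder network ending in per-class sigmoids
net = lasagne.layers.InputLayer((None, 1, 64, 64), input_var=input_var)
net = lasagne.layers.DenseLayer(net, num_units=10,
                                nonlinearity=lasagne.nonlinearities.sigmoid)

prediction = lasagne.layers.get_output(net)
loss = calc_loss_multi(prediction, targets)
params = lasagne.layers.get_all_params(net, trainable=True)
updates = lasagne.updates.adam(loss, params)
train_fn = theano.function([input_var, targets], loss, updates=updates)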
Example #2
Source File: vardelay_utils.py    From recurrent-memory with GNU Lesser General Public License v3.0
import numpy as np
import theano.tensor as T
from lasagne.objectives import binary_crossentropy

def build_loss(pred_var, target_var, resp_dur, t_ind):
    if t_ind in (0, 1, 4):
        # regression tasks: absolute error wrapped modulo pi (circular variable)
        loss = T.mean(T.mod(T.abs_(pred_var[:, -resp_dur:, :] - target_var[:, -resp_dur:, :]), np.pi))
    elif t_ind in (2, 6, 8):
        # decision tasks: crossentropy on the last output channel over the response period
        loss = T.mean(binary_crossentropy(pred_var[:, -resp_dur:, -1], target_var[:, -resp_dur:, -1]))
    return loss
Example #3
Source File: birdCLEF_train.py    From BirdCLEF2017 with MIT License
import theano.tensor as T
from lasagne import objectives

def calc_loss_multi(prediction, targets):

    # clip predictions away from exactly 0 and 1, where the log-loss diverges
    prediction = T.clip(prediction, 0.0000001, 0.9999999)

    # binary crossentropy is the right choice for a multi-label sigmoid output
    loss = T.mean(objectives.binary_crossentropy(prediction, targets))

    return loss

Example #4
Source File: updates.py    From Deep-SVDD with MIT License
# assumed module-level imports in updates.py: theano, theano.tensor as T,
# lasagne, lasagne.objectives as l_objectives, and the project helpers
# Cfg, get_l2_penalty and get_updates
def compile_update_softmax(nnet, inputs, targets):
    """
    Compile the Theano training and evaluation functions for the given
    network: binary crossentropy in the AD (anomaly detection) setting,
    softmax/categorical crossentropy otherwise.
    """

    floatX = Cfg.floatX
    C = Cfg.C

    final_layer = nnet.all_layers[-1]
    trainable_params = lasagne.layers.get_all_params(final_layer, trainable=True)

    # Regularization
    if Cfg.weight_decay:
        l2_penalty = (floatX(0.5) / C) * get_l2_penalty(nnet)
    else:
        l2_penalty = T.cast(0, dtype='floatX')

    # Backpropagation
    prediction = lasagne.layers.get_output(final_layer, inputs=inputs, deterministic=False)

    if Cfg.ad_experiment:
        train_loss = T.mean(l_objectives.binary_crossentropy(prediction.flatten(), targets), dtype='floatX')
        train_acc = T.mean(l_objectives.binary_accuracy(prediction.flatten(), targets), dtype='floatX')
    else:
        train_loss = T.mean(l_objectives.categorical_crossentropy(prediction, targets), dtype='floatX')
        train_acc = T.mean(T.eq(T.argmax(prediction, axis=1), targets), dtype='floatX')

    train_obj = T.cast(train_loss + l2_penalty, dtype='floatX')
    updates = get_updates(nnet, train_obj, trainable_params, solver=nnet.solver)
    nnet.backprop = theano.function([inputs, targets], [train_obj, train_acc], updates=updates)

    # Forwardpropagation
    test_prediction = lasagne.layers.get_output(final_layer, inputs=inputs, deterministic=True)

    if Cfg.ad_experiment:
        test_loss = T.mean(l_objectives.binary_crossentropy(test_prediction.flatten(), targets), dtype='floatX')
        test_acc = T.mean(l_objectives.binary_accuracy(test_prediction.flatten(), targets), dtype='floatX')
    else:
        test_loss = T.mean(l_objectives.categorical_crossentropy(test_prediction, targets), dtype='floatX')
        test_acc = T.mean(T.eq(T.argmax(test_prediction, axis=1), targets), dtype='floatX')

    test_obj = T.cast(test_loss + l2_penalty, dtype='floatX')
    nnet.forward = theano.function([inputs, targets], [test_obj, test_acc, test_prediction, l2_penalty, test_loss]) 
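Once compiled, nnet.backprop and nnet.forward would be driven from an ordinary minibatch loop; a sketch with hypothetical data (the arrays, shapes, and batch size are illustrative, not from the repository):

import numpy as np

# hypothetical data; in Deep-SVDD these come from nnet.data
X_train = np.random.rand(128, 1, 28, 28).astype('float32')
y_train = np.random.randint(0, 2, size=128).astype('int32')

batch_size = 32
for epoch in range(10):
    for start in range(0, len(X_train), batch_size):
        X_batch = X_train[start:start + batch_size]
        y_batch = y_train[start:start + batch_size]
        train_obj, train_acc = nnet.backprop(X_batch, y_batch)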
Example #5
Source File: updates.py    From Deep-SVDD with MIT License
# assumed module-level imports as above (theano, theano.tensor as T, lasagne,
# Cfg, get_updates), plus regularize_network_params and l2 from
# lasagne.regularization
def create_autoencoder(nnet):
    """
    Compile the autoencoder Theano training and evaluation functions for the
    given network.
    """

    floatX = Cfg.floatX
    C = Cfg.ae_C
    ndim = nnet.data._X_train.ndim

    if ndim == 2:
        inputs = T.matrix('inputs')
    elif ndim == 4:
        inputs = T.tensor4('inputs')

    final_layer = nnet.all_layers[-1]

    # Backpropagation
    trainable_params = lasagne.layers.get_all_params(final_layer, trainable=True)
    prediction = lasagne.layers.get_output(final_layer, inputs=inputs, deterministic=False)

    # use an l2 or binary crossentropy reconstruction loss
    # (crossentropy assumes the input features are scaled to [0, 1])
    if Cfg.ae_loss == "l2":
        loss = lasagne.objectives.squared_error(prediction, inputs)
    elif Cfg.ae_loss == "ce":
        loss = lasagne.objectives.binary_crossentropy(prediction, inputs)

    # per-sample reconstruction scores: sum over all non-batch axes
    # (list() keeps the axis argument valid under Python 3)
    scores = T.sum(loss, axis=list(range(1, ndim)), dtype='floatX')
    loss = T.mean(scores)

    # Regularization
    if Cfg.ae_weight_decay:
        l2_penalty = (floatX(0.5) / C) * regularize_network_params(final_layer, l2)
    else:
        l2_penalty = T.cast(0, dtype='floatX')

    train_obj = loss + l2_penalty
    updates = get_updates(nnet, train_obj, trainable_params, solver=nnet.ae_solver)
    nnet.ae_backprop = theano.function([inputs], [loss, l2_penalty, scores], updates=updates)

    # Forwardpropagation
    test_prediction = lasagne.layers.get_output(final_layer, inputs=inputs, deterministic=True)

    # use an l2 or binary crossentropy reconstruction loss
    # (crossentropy assumes the input features are scaled to [0, 1])
    if Cfg.ae_loss == "l2":
        test_loss = lasagne.objectives.squared_error(test_prediction, inputs)
    elif Cfg.ae_loss == "ce":
        test_loss = lasagne.objectives.binary_crossentropy(test_prediction, inputs)

    test_scores = T.sum(test_loss, axis=list(range(1, ndim)), dtype='floatX')
    test_loss = T.mean(test_scores)

    nnet.ae_forward = theano.function([inputs],
                                      [test_loss, l2_penalty, test_scores, test_prediction])
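The scores returned by both compiled functions are summed per-sample reconstruction errors. A sketch of calling them once compiled (the batch array is hypothetical):

import numpy as np

X_batch = np.random.rand(32, 1, 28, 28).astype('float32')  # hypothetical batch
loss_val, l2_val, batch_scores = nnet.ae_backprop(X_batch)  # one training step
test_loss, l2_val, test_scores, recon = nnet.ae_forward(X_batch)  # evaluation pass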