Python lasagne.updates() Examples
The following are 30 code examples of the module lasagne.updates.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module lasagne, or try the search function.
Example #1
Source File: deep_conv_classification_alt48_luad10in20_brca10.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #2
Source File: deep_conv_classification_alt48maxp_luad10_luad10in20_brca10x1.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #3
Source File: deep_conv_classification_alt45.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #4
Source File: deep_conv_classification_alt48_luad10_luad10in20_brca10x2.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #5
Source File: deep_conv_classification_alt54.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Stochastic training pass (deterministic=False).
    train_out = lasagne.layers.get_output(network, deterministic=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #6
Source File: deep_conv_classification_alt41.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #7
Source File: deep_conv_classification_alt38.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #8
Source File: deep_conv_classification_alt32.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #9
Source File: deep_conv_classification_alt36_deploy.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #10
Source File: deep_conv_classification_alt49.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #11
Source File: deep_conv_classification_alt59.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #12
Source File: deep_conv_classification_alt46.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #13
Source File: deep_conv_classification_alt48.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #14
Source File: deep_conv_classification_alt44.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #15
Source File: deep_conv_classification_alt48_luad10_skcm10_lr0.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #16
Source File: deep_conv_classification_alt62.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #17
Source File: deep_conv_classification_alt48_adeno_prad_t1_heatmap.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #18
Source File: deep_conv_classification_lpatch_alt2.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Default (stochastic) training pass.
    train_out = lasagne.layers.get_output(network)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #19
Source File: deep_conv_classification_alt48_only_skcm_t0.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Default (stochastic) training pass.
    train_out = lasagne.layers.get_output(network)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #20
Source File: deep_conv_classification_alt48maxp_luad10_luad10in20_brca10x2.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #21
Source File: deep_conv_classification_lpatch_alt0.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Default (stochastic) training pass.
    train_out = lasagne.layers.get_output(network)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #22
Source File: deep_conv_classification_alt36.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #23
Source File: deep_conv_classification_alt61.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #24
Source File: deep_conv_classification_alt48_luad10_skcm10_v0.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Default (stochastic) training pass.
    train_out = lasagne.layers.get_output(network)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #25
Source File: deep_conv_classification_alt29.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #26
Source File: deep_conv_classification_alt48_heatmap_only_melanoma.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #27
Source File: deep_conv_classification_lpatch_alt3.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Deterministic training pass.  NOTE(review): training on the
    # deterministic output is unusual — presumably intentional here;
    # confirm against the caller.
    train_out = lasagne.layers.get_output(network, deterministic=True)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #28
Source File: deep_conv_classification_alt36-sp-cnn.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #29
Source File: deep_conv_classification_alt27.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn
Example #30
Source File: deep_conv_classification_alt28.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License | 6 votes |
def make_training_functions(network, new_params, input_var, aug_var, target_var):
    """Compile Theano training and validation functions for ``network``.

    Returns ``(train_fn, new_params_train_fn, val_fn)``: the first two
    minimise the training loss over all trainable parameters and over
    ``new_params`` respectively; ``val_fn`` reports the deterministic
    loss and network output.
    """
    inputs = [input_var, aug_var, target_var]

    # Training pass.  NOTE(review): this uses the stored batch-norm
    # averages without updating them (deterministic=True) — presumably a
    # deliberate fine-tuning choice; confirm against the caller.
    train_out = lasagne.layers.get_output(
        network, deterministic=True,
        batch_norm_use_averages=True, batch_norm_update_averages=False)
    train_loss = lasagne.objectives.binary_crossentropy(train_out, target_var).mean()

    # Deterministic pass used only for validation.
    eval_out = lasagne.layers.get_output(network, deterministic=True)
    eval_loss = lasagne.objectives.binary_crossentropy(eval_out, target_var).mean()

    def sgd_updates(param_list):
        # Nesterov momentum on the training loss for the given parameters.
        return lasagne.updates.nesterov_momentum(
            train_loss, param_list, learning_rate=LearningRate, momentum=0.985)

    all_params = layers.get_all_params(network, trainable=True)
    val_fn = theano.function(inputs, [eval_loss, eval_out])
    train_fn = theano.function(inputs, train_loss, updates=sgd_updates(all_params))
    new_params_train_fn = theano.function(
        inputs, train_loss, updates=sgd_updates(new_params))
    return train_fn, new_params_train_fn, val_fn