Python lasagne.init.Constant() Examples
The following are 18 code examples of lasagne.init.Constant(), collected from open-source projects. Each example is shown with its source file, originating project, and license. You may also want to check out all available functions and classes of the module lasagne.init.
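As a quick refresher before the examples: lasagne.init.Constant(val) is an Initializer whose sample(shape) method returns an array filled with a fixed value. In practice it is rarely sampled by hand; it is passed to a layer constructor (or to Layer.add_param), which calls sample() with the correct parameter shape when the parameter is created. A minimal sketch, with arbitrary placeholder sizes:

import numpy as np
import lasagne
from lasagne.init import Constant
from lasagne.layers import InputLayer, DenseLayer

# Draw a concrete array from the initializer: a (3, 4) matrix of 0.5s.
w0 = Constant(0.5).sample((3, 4))
assert np.allclose(w0, 0.5)

# More commonly, the initializer is handed to a layer, which samples it
# with the right shape when the parameter is registered via add_param().
l_in = InputLayer(shape=(None, 100))   # batch of 100-dimensional inputs
l_out = DenseLayer(l_in, num_units=10,
                   W=lasagne.init.GlorotUniform(),
                   b=Constant(0.))     # all biases start at zero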
Example #1
Source File: layers.py From gogh-figure with GNU Affero General Public License v3.0
def __init__(self, incoming, num_styles=None, epsilon=1e-4,
             beta=Constant(0), gamma=Constant(1), **kwargs):
    super(InstanceNormLayer, self).__init__(incoming, **kwargs)
    self.axes = (2, 3)
    self.epsilon = epsilon
    if num_styles is None:
        shape = (self.input_shape[1],)
    else:
        shape = (num_styles, self.input_shape[1])
    if beta is None:
        self.beta = None
    else:
        self.beta = self.add_param(beta, shape, 'beta',
                                   trainable=True, regularizable=False)
    if gamma is None:
        self.gamma = None
    else:
        self.gamma = self.add_param(gamma, shape, 'gamma',
                                    trainable=True, regularizable=True)
Example #2
Source File: layers.py From kusanagi with MIT License
def __init__(self, incoming, num_units, W=init.GlorotUniform(),
             b=init.Constant(0.), nonlinearity=nonlinearities.rectify,
             num_leading_axes=1, p=0.5, shared_axes=(),
             noise_samples=None, **kwargs):
    super(DenseDropoutLayer, self).__init__(
        incoming, num_units, W, b, nonlinearity, num_leading_axes, **kwargs)
    self.p = p
    self.shared_axes = shared_axes
    # init random number generator
    self._srng = RandomStreams(get_rng().randint(1, 2147462579))
    # initialize noise samples
    self.noise = self.init_noise(noise_samples)
Example #3
Source File: layers_theano.py From visual_dynamics with MIT License
def __init__(self, incoming, num_filters, filter_size, stride=(1, 1), pad=0,
             untie_biases=False, groups=1, W=init.Uniform(),
             b=init.Constant(0.), nonlinearity=nl.rectify, flip_filters=True,
             convolution=T.nnet.conv2d, filter_dilation=(1, 1), **kwargs):
    assert num_filters % groups == 0
    self.groups = groups
    super(GroupConv2DLayer, self).__init__(
        incoming, num_filters, filter_size, stride=stride, pad=pad,
        untie_biases=untie_biases, W=W, b=b, nonlinearity=nonlinearity,
        flip_filters=flip_filters, convolution=convolution,
        filter_dilation=filter_dilation, **kwargs)
Example #4
Source File: custom_layers.py From acnn with GNU General Public License v3.0
def __init__(self, incomings, nfilters, nrings=5, nrays=16,
             W=LI.GlorotNormal(), b=LI.Constant(0.0),
             normalize_rings=False, normalize_input=False, take_max=True,
             nonlinearity=LN.rectify, **kwargs):
    super(GCNNLayer, self).__init__(incomings, **kwargs)
    # patch operator sizes
    self.nfilters = nfilters
    self.nrings = nrings
    self.nrays = nrays
    self.filter_shape = (nfilters, self.input_shapes[0][1], nrings, nrays)
    self.biases_shape = (nfilters,)
    # patch operator parameters
    self.normalize_rings = normalize_rings
    self.normalize_input = normalize_input
    self.take_max = take_max
    self.nonlinearity = nonlinearity
    # layer parameters:
    # y = Wx + b, where x are the input features and y are the output features
    self.W = self.add_param(W, self.filter_shape, name="W")
    self.b = self.add_param(b, self.biases_shape, name="b",
                            regularizable=False)
Example #5
Source File: layers_theano.py From visual_dynamics with MIT License
def __init__(self, incoming, offset=init.Constant(0), scale=init.Constant(1),
             shared_axes='auto', **kwargs):
    super(StandarizeLayer, self).__init__(incoming, **kwargs)
    if shared_axes == 'auto':
        # default: share offset and scale over all but the second axis
        shared_axes = (0,) + tuple(range(2, len(self.input_shape)))
    elif isinstance(shared_axes, int):
        shared_axes = (shared_axes,)
    self.shared_axes = shared_axes
    # create offset and scale parameters, ignoring all dimensions in shared_axes
    shape = [size for axis, size in enumerate(self.input_shape)
             if axis not in self.shared_axes]
    if any(size is None for size in shape):
        raise ValueError("StandarizeLayer needs specified input sizes for "
                         "all axes that offset and scale are not shared over.")
    self.offset = self.add_param(offset, shape, 'offset',
                                 regularizable=False, trainable=False)
    self.scale = self.add_param(scale, shape, 'scale',
                                regularizable=False, trainable=False)
Example #6
Source File: layers_theano.py From visual_dynamics with MIT License
def __init__(self, incomings, axis=1, Q=init.Normal(std=0.001),
             R=init.Normal(std=0.001), S=init.Normal(std=0.001),
             b=init.Constant(0.), **kwargs):
    """
    axis: the first axis of Y to be lumped into a single bilinear model.
        The bilinear models are computed independently for each element
        with respect to the preceding axes.
    """
    super(BilinearLayer, self).__init__(incomings, **kwargs)
    assert axis >= 1
    self.axis = axis
    self.y_shape, self.u_shape = [input_shape[1:]
                                  for input_shape in self.input_shapes]
    self.y_dim = int(np.prod(self.y_shape[self.axis-1:]))
    self.u_dim, = self.u_shape
    self.Q = self.add_param(Q, (self.y_dim, self.y_dim, self.u_dim), name='Q')
    self.R = self.add_param(R, (self.y_dim, self.u_dim), name='R')
    self.S = self.add_param(S, (self.y_dim, self.y_dim), name='S')
    if b is None:
        self.b = None
    else:
        self.b = self.add_param(b, (self.y_dim,), name='b',
                                regularizable=False)
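Judging from the parameter shapes registered above (Q: y_dim x y_dim x u_dim, R: y_dim x u_dim, S: y_dim x y_dim, b: y_dim), the layer presumably predicts the next state as a bilinear function of the state y and the control u; the actual forward pass lives in get_output_for, which is not shown here. A hypothetical NumPy sketch of that shape arithmetic:

import numpy as np

n, m = 4, 2                    # hypothetical y_dim and u_dim
y = np.random.randn(n)
u = np.random.randn(m)
Q = np.random.randn(n, n, m)   # shapes mirror the add_param calls above
R = np.random.randn(n, m)
S = np.random.randn(n, n)
b = np.zeros(n)                # b=init.Constant(0.) starts the bias at zero

# presumed bilinear dynamics: a term quadratic in (y, u), two linear
# terms, and a bias
y_next = np.einsum('ijk,j,k->i', Q, y, u) + R.dot(u) + S.dot(y) + b
print(y_next.shape)            # (4,)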
Example #7
Source File: layers_theano.py From visual_dynamics with MIT License
def __init__(self, incomings, Q=init.Normal(std=0.001),
             R=init.Normal(std=0.001), S=init.Normal(std=0.001),
             b=init.Constant(0.), **kwargs):
    super(BilinearChannelwiseLayer, self).__init__(incomings, **kwargs)
    self.y_shape, self.u_shape = [input_shape[1:]
                                  for input_shape in self.input_shapes]
    self.c_dim = self.y_shape[0]
    self.y_dim = int(np.prod(self.y_shape[1:]))
    self.u_dim, = self.u_shape
    self.Q = self.add_param(Q, (self.c_dim, self.y_dim, self.y_dim, self.u_dim),
                            name='Q')
    self.R = self.add_param(R, (self.c_dim, self.y_dim, self.u_dim), name='R')
    self.S = self.add_param(S, (self.c_dim, self.y_dim, self.y_dim), name='S')
    if b is None:
        self.b = None
    else:
        self.b = self.add_param(b, (self.c_dim, self.y_dim), name='b',
                                regularizable=False)
Example #8
Source File: crf.py From LasagneNLP with Apache License 2.0
def __init__(self, incoming, num_labels, mask_input=None,
             W=init.GlorotUniform(), b=init.Constant(0.), **kwargs):
    # This layer inherits from a MergeLayer, because it can have two
    # inputs - the layer input, and the mask.
    # We will just provide the layer input as incomings, unless a mask
    # input was provided.
    self.input_shape = incoming.output_shape
    incomings = [incoming]
    self.mask_incoming_index = -1
    if mask_input is not None:
        incomings.append(mask_input)
        self.mask_incoming_index = 1
    super(CRFLayer, self).__init__(incomings, **kwargs)
    self.num_labels = num_labels + 1
    self.pad_label_index = num_labels
    num_inputs = self.input_shape[2]
    self.W = self.add_param(W, (num_inputs, self.num_labels, self.num_labels),
                            name="W")
    if b is None:
        self.b = None
    else:
        self.b = self.add_param(b, (self.num_labels, self.num_labels),
                                name="b", regularizable=False)
Example #9
Source File: graph.py From LasagneNLP with Apache License 2.0
def __init__(self, incoming_vertex, incoming_edge, num_filters, filter_size,
             W=init.GlorotUniform(), b=init.Constant(0.),
             nonlinearity=nonlinearities.rectify, **kwargs):
    self.vertex_shape = incoming_vertex.output_shape
    self.edge_shape = incoming_edge.output_shape
    self.input_shape = incoming_vertex.output_shape
    incomings = [incoming_vertex, incoming_edge]
    self.vertex_incoming_index = 0
    self.edge_incoming_index = 1
    super(GraphConvLayer, self).__init__(incomings, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = nonlinearities.identity
    else:
        self.nonlinearity = nonlinearity
    self.num_filters = num_filters
    self.filter_size = filter_size
    self.W = self.add_param(W, self.get_W_shape(), name="W")
    if b is None:
        self.b = None
    else:
        self.b = self.add_param(b, (num_filters,), name="b",
                                regularizable=False)
Example #10
Source File: highway.py From LasagneNLP with Apache License 2.0
def __init__(self, incoming, W_h=init.GlorotUniform(), b_h=init.Constant(0.),
             W_t=init.GlorotUniform(), b_t=init.Constant(0.),
             nonlinearity=nonlinearities.rectify, **kwargs):
    super(HighwayDenseLayer, self).__init__(incoming, **kwargs)
    self.nonlinearity = (nonlinearities.identity if nonlinearity is None
                         else nonlinearity)
    num_inputs = int(np.prod(self.input_shape[1:]))
    self.W_h = self.add_param(W_h, (num_inputs, num_inputs), name="W_h")
    if b_h is None:
        self.b_h = None
    else:
        self.b_h = self.add_param(b_h, (num_inputs,), name="b_h",
                                  regularizable=False)
    self.W_t = self.add_param(W_t, (num_inputs, num_inputs), name="W_t")
    if b_t is None:
        self.b_t = None
    else:
        self.b_t = self.add_param(b_t, (num_inputs,), name="b_t",
                                  regularizable=False)
Example #11
Source File: parser.py From LasagneNLP with Apache License 2.0
def __init__(self, incoming, num_labels, mask_input=None,
             W_h=init.GlorotUniform(), W_c=init.GlorotUniform(),
             b=init.Constant(0.), **kwargs):
    # This layer inherits from a MergeLayer, because it can have two
    # inputs - the layer input, and the mask.
    # We will just provide the layer input as incomings, unless a mask
    # input was provided.
    self.input_shape = incoming.output_shape
    incomings = [incoming]
    self.mask_incoming_index = -1
    if mask_input is not None:
        incomings.append(mask_input)
        self.mask_incoming_index = 1
    super(DepParserLayer, self).__init__(incomings, **kwargs)
    self.num_labels = num_labels
    num_inputs = self.input_shape[2]
    # add parameters
    self.W_h = self.add_param(W_h, (num_inputs, self.num_labels), name='W_h')
    self.W_c = self.add_param(W_c, (num_inputs, self.num_labels), name='W_c')
    if b is None:
        self.b = None
    else:
        self.b = self.add_param(b, (self.num_labels,), name='b',
                                regularizable=False)
Example #12
Source File: rotconv.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def __init__(self, incoming, num_filters, num_rot, filter_size, stride=(1, 1),
             border_mode="valid", untie_biases=False, W=init.GlorotUniform(),
             b=init.Constant(0.), nonlinearity=nonlinearities.rectify,
             convolution=T.nnet.conv2d, **kwargs):
    super(RotConv, self).__init__(incoming, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = nonlinearities.identity
    else:
        self.nonlinearity = nonlinearity
    self.num_filters = num_filters
    self.num_rot = num_rot
    self.filter_size = as_tuple(filter_size, 2)
    self.stride = as_tuple(stride, 2)
    self.border_mode = border_mode
    self.untie_biases = untie_biases
    self.convolution = convolution
    if self.border_mode not in ['valid', 'full', 'same']:
        raise RuntimeError("Invalid border mode: '%s'" % self.border_mode)
    self.W = self.add_param(W, self.get_W_shape(), name="W")
    if b is None:
        self.b = None
    else:
        if self.untie_biases:
            biases_shape = (num_filters, self.output_shape[2],
                            self.output_shape[3])
        else:
            biases_shape = (num_filters,)
        self.b = self.add_param(b, biases_shape, name="b",
                                regularizable=False)
Example #13
Source File: batch_norms.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def __init__(self, incoming, beta=init.Constant(-0.01), **kwargs):
    super(BNRectifyThres, self).__init__(incoming, **kwargs)
    self.beta = self.add_param(beta, (1,), 'beta',
                               trainable=False, regularizable=False)
Example #14
Source File: batch_norms.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def __init__(self, incoming, axes='auto', epsilon=1e-4, alpha=0.1,
             beta=init.Constant(-3.0), mean=init.Constant(0),
             inv_std=init.Constant(1), **kwargs):
    super(BatchNormSparseLayer, self).__init__(incoming, **kwargs)
    if axes == 'auto':
        # default: normalize over all but the second axis
        axes = (0,) + tuple(range(2, len(self.input_shape)))
    elif isinstance(axes, int):
        axes = (axes,)
    self.axes = axes
    self.epsilon = epsilon
    self.alpha = alpha
    # create parameters, ignoring all dimensions in axes
    shape = [size for axis, size in enumerate(self.input_shape)
             if axis not in self.axes]
    if any(size is None for size in shape):
        raise ValueError("BatchNormSparseLayer needs specified input sizes "
                         "for all axes not normalized over.")
    self.beta = self.add_param(beta, shape, 'beta',
                               trainable=False, regularizable=False)
    self.mean = self.add_param(mean, shape, 'mean',
                               trainable=False, regularizable=False)
    self.inv_std = self.add_param(inv_std, shape, 'inv_std',
                                  trainable=False, regularizable=False)
Example #15
Source File: batch_norms.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def __init__(self, incoming, perc=99.9, alpha=0.1, beta=init.Constant(5.0),
             tight=20.0, bias=0.0, **kwargs):
    super(SoftThresPerc, self).__init__(incoming, **kwargs)
    self.perc = perc
    self.alpha = alpha
    self.tight = tight
    self.bias = bias
    self.beta = self.add_param(beta, (1,), 'beta',
                               trainable=False, regularizable=False)
Example #16
Source File: batch_norms.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def __init__(self, incoming, axes='auto', epsilon=1e-4, alpha=0.1,
             beta=init.Constant(0), gamma=init.Constant(1),
             mean=init.Constant(0), inv_std=init.Constant(1), **kwargs):
    super(BatchNormLayer, self).__init__(incoming, **kwargs)
    if axes == 'auto':
        # default: normalize over all but the second axis
        axes = (0,) + tuple(range(2, len(self.input_shape)))
    elif isinstance(axes, int):
        axes = (axes,)
    self.axes = axes
    self.epsilon = epsilon
    self.alpha = alpha
    # create parameters, ignoring all dimensions in axes
    shape = [size for axis, size in enumerate(self.input_shape)
             if axis not in self.axes]
    if any(size is None for size in shape):
        raise ValueError("BatchNormLayer needs specified input sizes for "
                         "all axes not normalized over.")
    if beta is None:
        self.beta = None
    else:
        self.beta = self.add_param(beta, shape, 'beta',
                                   trainable=True, regularizable=False)
    if gamma is None:
        self.gamma = None
    else:
        self.gamma = self.add_param(gamma, shape, 'gamma',
                                    trainable=True, regularizable=True)
    self.mean = self.add_param(mean, shape, 'mean',
                               trainable=False, regularizable=False)
    self.inv_std = self.add_param(inv_std, shape, 'inv_std',
                                  trainable=False, regularizable=False)
Example #17
Source File: batch_norms.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def __init__(self, incoming, perc=99.9, alpha=0.1, beta=init.Constant(5.0),
             **kwargs):
    super(BNRectifyPerc, self).__init__(incoming, **kwargs)
    self.perc = perc
    self.alpha = alpha
    self.beta = self.add_param(beta, (1,), 'beta',
                               trainable=False, regularizable=False)
Example #18
Source File: batch_norms.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def batch_nmsp(layer, beta=init.Constant(-3.0), **kwargs):
    nonlinearity = getattr(layer, 'nonlinearity', None)
    if nonlinearity is not None:
        layer.nonlinearity = nonlinearities.identity
    if hasattr(layer, 'b') and layer.b is not None:
        del layer.params[layer.b]
        layer.b = None
    layer = BatchNormSparseLayer(layer, beta=beta, **kwargs)
    if nonlinearity is not None:
        from lasagne.layers import NonlinearityLayer
        layer = NonlinearityLayer(layer, nonlinearity)
    return layer
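batch_nmsp follows the same pattern as lasagne.layers.batch_norm: it removes the wrapped layer's bias and nonlinearity, inserts BatchNormSparseLayer, then re-applies the nonlinearity on top. A hypothetical usage sketch (the surrounding network is made up for illustration):

from lasagne.layers import InputLayer, DenseLayer
from lasagne import nonlinearities

l_in = InputLayer(shape=(None, 256))
# Wrap a dense layer; its bias is deleted and BatchNormSparseLayer's
# beta (init.Constant(-3.0) by default) effectively takes its place.
l_hid = batch_nmsp(DenseLayer(l_in, num_units=128,
                              nonlinearity=nonlinearities.rectify))
# l_hid is now a NonlinearityLayer stacked on the normalization layer.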