Python keras.regularizers() Examples
The following are 3 code examples of the keras.regularizers module.
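For context, keras.regularizers provides the l1, l2, and l1_l2 penalty factories that layers accept through arguments such as kernel_regularizer. Below is a minimal sketch of that API; the penalty strengths are illustrative placeholders, not recommendations.

# Minimal sketch of the keras.regularizers API; the penalty values
# (0.01, 1e-4) are illustrative placeholders.
from keras import regularizers
from keras.layers import Dense

# l1, l2, and l1_l2 build penalty objects that a layer applies
# to its weights or activations during training.
dense = Dense(
    64,
    activation='relu',
    kernel_regularizer=regularizers.l2(0.01),    # penalize large weights
    activity_regularizer=regularizers.l1(1e-4),  # penalize large activations
)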
Example #1
Source File: timedistributed.py From fancy-cnn with MIT License
def build(self):
    # Infer the rank of the incoming tensor from the previous layer when
    # connected; otherwise fall back to this layer's own input_shape.
    try:
        self.input_ndim = len(self.previous.input_shape)
    except AttributeError:
        self.input_ndim = len(self.input_shape)

    # Build the wrapped layer on a single timestep's shape.
    self.layer.set_input_shape((None,) + self.input_shape[2:])

    # Forward the wrapped layer's regularizers, constraints, and
    # trainable weights so training sees them on this wrapper.
    if hasattr(self.layer, 'regularizers'):
        self.regularizers = self.layer.regularizers
    if hasattr(self.layer, 'constraints'):
        self.constraints = self.layer.constraints
    if hasattr(self.layer, 'trainable_weights'):
        self.trainable_weights = self.layer.trainable_weights

    if self.initial_weights is not None:
        self.layer.set_weights(self.initial_weights)
        del self.initial_weights
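The hasattr checks above implement a simple attribute-forwarding pattern: the wrapper copies regularizers, constraints, and trainable weights only when the wrapped layer actually defines them. Here is a standalone sketch of the same pattern; the Inner and Wrapper names are made up for illustration and are not part of Keras.

# Standalone sketch of the attribute-forwarding pattern above;
# class names and attribute values are illustrative only.
class Inner(object):
    def __init__(self):
        self.regularizers = ['l2']
        self.trainable_weights = ['W', 'b']

class Wrapper(object):
    def __init__(self, layer):
        self.layer = layer

    def build(self):
        # Copy optional attributes only if the wrapped layer defines them.
        for attr in ('regularizers', 'constraints', 'trainable_weights'):
            if hasattr(self.layer, attr):
                setattr(self, attr, getattr(self.layer, attr))

w = Wrapper(Inner())
w.build()
print(w.regularizers)  # ['l2']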
Example #2
Source File: attentionlayer.py From recurrent-attention-for-QA-SQUAD-based-on-keras with MIT License
def __init__(self, h, output_dim, init='glorot_uniform', **kwargs):
    self.init = initializations.get(init)
    self.h = h
    self.output_dim = output_dim
    # removing the regularizers and the dropout
    super(AttenLayer, self).__init__(**kwargs)
    # this seems necessary in order to accept 3 input dimensions
    # (samples, timesteps, features)
    self.input_spec = [InputSpec(ndim=3)]
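For comparison, a custom layer in the modern tf.keras API can declare the same (samples, timesteps, features) contract with InputSpec(ndim=3) and attach a regularizer through add_weight. This sketch assumes TensorFlow 2.x and is not taken from the quoted project.

# Hypothetical tf.keras layer sketch (TensorFlow 2.x assumed); the
# SimpleAtten name and penalty value are illustrative only.
import tensorflow as tf
from tensorflow.keras import regularizers
from tensorflow.keras.layers import Layer, InputSpec

class SimpleAtten(Layer):
    """Toy layer that accepts only (samples, timesteps, features) inputs."""
    def __init__(self, output_dim, **kwargs):
        super(SimpleAtten, self).__init__(**kwargs)
        self.output_dim = output_dim
        self.input_spec = InputSpec(ndim=3)  # enforce 3-D input

    def build(self, input_shape):
        # add_weight wires the regularizer into the layer's losses.
        self.kernel = self.add_weight(
            name='kernel',
            shape=(int(input_shape[-1]), self.output_dim),
            initializer='glorot_uniform',
            regularizer=regularizers.l2(1e-4),
        )
        super(SimpleAtten, self).build(input_shape)

    def call(self, inputs):
        # (batch, timesteps, features) x (features, output_dim)
        return tf.matmul(inputs, self.kernel)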
Example #3
Source File: architectures.py From DeepIV with MIT License
def feed_forward_net(input, output, hidden_layers=[64, 64], activations='relu',
                     dropout_rate=0., l2=0., constrain_norm=False):
    '''
    Helper function for building a Keras feed forward network.

    input: Keras Input object appropriate for the data,
           e.g. input=Input(shape=(20,)).
    output: Function representing the final layer of the network, mapping
            from the last hidden layer to the output,
            e.g. output = Dense(10, activation='softmax') if we're doing
            10-class classification, or output = Dense(1, activation='linear')
            if we're doing regression.
    '''
    state = input
    if isinstance(activations, str):
        activations = [activations] * len(hidden_layers)

    for h, a in zip(hidden_layers, activations):
        # Attach an L2 weight penalty only when a positive strength is given.
        if l2 > 0.:
            w_reg = keras.regularizers.l2(l2)
        else:
            w_reg = None
        const = maxnorm(2) if constrain_norm else None
        state = Dense(h, activation=a, kernel_regularizer=w_reg,
                      kernel_constraint=const)(state)
        if dropout_rate > 0.:
            state = Dropout(dropout_rate)(state)

    return output(state)
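A usage sketch for the helper above, following its own docstring; it assumes the module's imports (Dense, Dropout, keras, maxnorm) are in scope, and the input shape and hyperparameter values are illustrative placeholders.

# Usage sketch for feed_forward_net; shape and hyperparameters are
# illustrative placeholders, not values from the DeepIV project.
from keras.layers import Input, Dense
from keras.models import Model

x = Input(shape=(20,))
y = feed_forward_net(
    x,
    Dense(1, activation='linear'),  # regression head, as in the docstring
    hidden_layers=[64, 64],
    dropout_rate=0.2,
    l2=1e-3,                        # enables keras.regularizers.l2 inside
)
model = Model(inputs=x, outputs=y)
model.compile(optimizer='adam', loss='mse')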