Python keras.layers.InputSpec() Examples
The following are 30 code examples of keras.layers.InputSpec().
You can go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module keras.layers, or try the search function.
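All of the examples share one pattern: a custom layer assigns self.input_spec so that Keras can validate the rank, dtype, or per-axis sizes of the tensors it receives. As a quick orientation, here is a minimal, self-contained sketch of that pattern; the ScaleLayer name and shapes are illustrative only, not taken from any of the projects below.

from keras import backend as K
from keras.layers import Layer, InputSpec

class ScaleLayer(Layer):
    """Toy layer: multiplies the last axis by a trainable vector."""

    def __init__(self, **kwargs):
        super(ScaleLayer, self).__init__(**kwargs)
        # Accept any tensor with at least two dimensions for now.
        self.input_spec = InputSpec(min_ndim=2)

    def build(self, input_shape):
        input_dim = input_shape[-1]
        # Tighten the spec once the last dimension is known, so Keras
        # rejects inputs whose final axis does not match the weights.
        self.input_spec = InputSpec(min_ndim=2, axes={-1: input_dim})
        self.scale = self.add_weight(shape=(input_dim,),
                                     initializer='ones',
                                     name='scale')
        self.built = True

    def call(self, inputs):
        return inputs * self.scale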
Example #1
Source File: attention.py From keras-utility-layer-collection with MIT License
def build(self, input_shape):
    self._validate_input_shape(input_shape)

    # Pin the wrapper's input spec to the exact shape it was built with.
    self.input_spec = InputSpec(shape=input_shape)

    if not self.layer.built:
        self.layer.build(input_shape)
        self.layer.built = True

    input_dim = input_shape[-1]

    if self.layer.return_sequences:
        output_dim = self.layer.compute_output_shape(input_shape)[0][-1]
    else:
        output_dim = self.layer.compute_output_shape(input_shape)[-1]

    self._W1 = self.add_weight(shape=(input_dim, input_dim),
                               name="{}_W1".format(self.name),
                               initializer=self.weight_initializer)
    self._W2 = self.add_weight(shape=(output_dim, input_dim),
                               name="{}_W2".format(self.name),
                               initializer=self.weight_initializer)
    self._W3 = self.add_weight(shape=(2*input_dim, input_dim),
                               name="{}_W3".format(self.name),
                               initializer=self.weight_initializer)
    self._b2 = self.add_weight(shape=(input_dim,),
                               name="{}_b2".format(self.name),
                               initializer=self.weight_initializer)
    self._b3 = self.add_weight(shape=(input_dim,),
                               name="{}_b3".format(self.name),
                               initializer=self.weight_initializer)
    self._V = self.add_weight(shape=(input_dim, 1),
                              name="{}_V".format(self.name),
                              initializer=self.weight_initializer)

    super(AttentionRNNWrapper, self).build()
Example #2
Source File: attentive_convlstm.py From sam with MIT License
def __init__(self, nb_filters_in, nb_filters_out, nb_filters_att, nb_rows, nb_cols,
             init='normal', inner_init='orthogonal', attentive_init='zero',
             activation='tanh', inner_activation='sigmoid',
             W_regularizer=None, U_regularizer=None,
             weights=None, go_backwards=False,
             **kwargs):
    self.nb_filters_in = nb_filters_in
    self.nb_filters_out = nb_filters_out
    self.nb_filters_att = nb_filters_att
    self.nb_rows = nb_rows
    self.nb_cols = nb_cols
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.attentive_init = initializations.get(attentive_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.initial_weights = weights
    self.go_backwards = go_backwards

    self.W_regularizer = W_regularizer
    self.U_regularizer = U_regularizer

    self.input_spec = [InputSpec(ndim=5)]
    super(AttentiveConvLSTM, self).__init__(**kwargs)
Example #3
Source File: SparseFullyConnectedLayer.py From NeuralResponseRanking with MIT License
def __init__(self, output_dim, init='glorot_uniform', activation='relu', weights=None,
             W_regularizer=None, b_regularizer=None, activity_regularizer=None,
             W_constraint=None, b_constraint=None, input_dim=None, **kwargs):
    self.W_initializer = initializers.get(init)
    self.b_initializer = initializers.get('zeros')
    self.activation = activations.get(activation)
    self.output_dim = output_dim
    self.input_dim = input_dim

    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)

    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)

    self.initial_weights = weights
    self.input_spec = InputSpec(ndim=2)

    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(SparseFullyConnectedLayer, self).__init__(**kwargs)
Example #4
Source File: SparseFullyConnectedLayer.py From NeuralResponseRanking with MIT License
def build(self, input_shape):
    assert len(input_shape) == 2
    input_dim = input_shape[1]
    # Restrict inputs to 2-D tensors and pin axis 1 to the weight matrix's input dimension.
    #self.input_spec = InputSpec(dtype=K.floatx(), shape=(None, input_dim))
    self.input_spec = InputSpec(ndim=2, axes={1: input_dim})

    self.W = self.add_weight(
        shape=(input_dim, self.output_dim),
        initializer=self.W_initializer,
        name='SparseFullyConnected_W',
        regularizer=self.W_regularizer,
        constraint=self.W_constraint)
    self.b = self.add_weight(
        shape=(self.output_dim,),
        initializer=self.b_initializer,
        name='SparseFullyConnected_b',
        regularizer=self.b_regularizer,
        constraint=self.b_constraint)

    if self.initial_weights is not None:
        self.set_weights(self.initial_weights)
        del self.initial_weights
    #self.built = True
    #super(SparseFullyConnectedLayer, self).build(input_shape)
Example #5
Source File: gcnn.py From nn_playground with MIT License
def build(self, input_shape):
    input_dim = input_shape[2]
    self.input_dim = input_dim
    self.input_spec = [InputSpec(shape=input_shape)]

    self.kernel_shape = (self.window_size, 1, input_dim, self.output_dim * 2)
    self.kernel = self.add_weight(self.kernel_shape,
                                  initializer=self.kernel_initializer,
                                  name='kernel',
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)
    if self.use_bias:
        self.bias = self.add_weight((self.output_dim * 2,),
                                    initializer=self.bias_initializer,
                                    name='b',
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)

    self.built = True
Example #6
Source File: capslayers.py From deepcaps with MIT License
def __init__(self, ch_j, n_j,
             r_num=1,
             b_alphas=[8, 8, 8],
             kernel_initializer='glorot_uniform',
             kernel_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             **kwargs):
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(DenseCaps, self).__init__(**kwargs)
    self.ch_j = ch_j  # number of capsules in layer J
    self.n_j = n_j    # number of neurons in a capsule in J
    self.r_num = r_num
    self.b_alphas = b_alphas
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.input_spec = InputSpec(min_ndim=3)
    self.supports_masking = True
Example #7
Source File: shareable_gru.py From deep_qa with Apache License 2.0
def call(self, x, mask=None, **kwargs):
    input_shape = K.int_shape(x)
    res = super(ShareableGRU, self).call(x, mask, **kwargs)
    self.input_spec = [InputSpec(shape=(self.input_spec[0].shape[0],
                                        None,
                                        self.input_spec[0].shape[2]))]
    if K.ndim(x) == K.ndim(res):
        # A recent change in Keras
        # (https://github.com/fchollet/keras/commit/a9b6bef0624c67d6df1618ca63d8e8141b0df4d0)
        # made it so that K.rnn with a tensorflow backend does not retain shape information for
        # the sequence length, even if it's present in the input. We need to fix that here so
        # that our models have the right shape information. A simple K.reshape is good enough
        # to fix this.
        result_shape = K.int_shape(res)
        if input_shape[1] is not None and result_shape[1] is None:
            shape = (input_shape[0] if input_shape[0] is not None else -1,
                     input_shape[1], result_shape[2])
            res = K.reshape(res, shape=shape)
    return res
Example #8
Source File: time_distributed.py From deep_qa with Apache License 2.0
def build(self, input_shape):
    if isinstance(input_shape, tuple):
        input_shape = [input_shape]
    assert all(len(shape) >= 3 for shape in input_shape), "Need 3 dims to TimeDistribute"
    all_timesteps = [i[1] for i in input_shape]
    assert len(set(all_timesteps)) == 1, "Tensors must have same number of timesteps"
    self.input_spec = [InputSpec(shape=shape) for shape in input_shape]
    if not self.layer.built:
        child_input_shape = [(shape[0],) + shape[2:] for shape in input_shape]
        if len(input_shape) == 1:
            child_input_shape = child_input_shape[0]
        self.layer.build(child_input_shape)
        self.layer.built = True
    self.built = True
    # It's important that we call Wrapper.build() here, because it sets some important member
    # variables. But we can't call KerasTimeDistributed.build(), because it assumes only one
    # input, which we're trying to fix. So we use super(KerasTimeDistributed, self).build()
    # here on purpose - this is not a copy-paste bug.
    super(KerasTimeDistributed, self).build(input_shape)  # pylint: disable=bad-super-call
Example #9
Source File: binary_layers.py From QuantizedNeuralNetworks-Keras-Tensorflow with BSD 3-Clause "New" or "Revised" License
def build(self, input_shape):
    assert len(input_shape) >= 2
    input_dim = input_shape[1]

    if self.H == 'Glorot':
        self.H = np.float32(np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot H: {}'.format(self.H))

    if self.kernel_lr_multiplier == 'Glorot':
        self.kernel_lr_multiplier = np.float32(1. / np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot learning rate multiplier: {}'.format(self.kernel_lr_multiplier))

    self.kernel_constraint = Clip(-self.H, self.H)
    self.kernel_initializer = initializers.RandomUniform(-self.H, self.H)
    self.kernel = self.add_weight(shape=(input_dim, self.units),
                                  initializer=self.kernel_initializer,
                                  name='kernel',
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)

    if self.use_bias:
        self.lr_multipliers = [self.kernel_lr_multiplier, self.bias_lr_multiplier]
        self.bias = self.add_weight(shape=(self.output_dim,),
                                    initializer=self.bias_initializer,
                                    name='bias',
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)
    else:
        self.lr_multipliers = [self.kernel_lr_multiplier]
        self.bias = None

    self.input_spec = InputSpec(min_ndim=2, axes={-1: input_dim})
    self.built = True
Example #10
Source File: keras_models.py From emnlp2017-relation-extraction with Apache License 2.0
def __init__(self, **kwargs):
    super(GlobalSumPooling1D, self).__init__(**kwargs)
    self.input_spec = [layers.InputSpec(ndim=3)]
Example #11
Source File: qrnn.py From qrnn with MIT License
def build(self, input_shape):
    if self.stateful:
        self.reset_states()
    else:
        # initial states: all-zero tensor of shape (output_dim)
        self.states = [None]
    input_dim = input_shape[2]
    self.input_spec = [InputSpec(shape=input_shape)]
    self.W_shape = (self.window_size, 1, input_dim, self.output_dim)

    self.W_z = self.init(self.W_shape, name='{}_W_z'.format(self.name))
    self.W_f = self.init(self.W_shape, name='{}_W_f'.format(self.name))
    self.W_o = self.init(self.W_shape, name='{}_W_o'.format(self.name))
    self.trainable_weights = [self.W_z, self.W_f, self.W_o]
    self.W = K.concatenate([self.W_z, self.W_f, self.W_o], 1)

    if self.bias:
        self.b_z = K.zeros((self.output_dim,), name='{}_b_z'.format(self.name))
        self.b_f = K.zeros((self.output_dim,), name='{}_b_f'.format(self.name))
        self.b_o = K.zeros((self.output_dim,), name='{}_b_o'.format(self.name))
        self.trainable_weights += [self.b_z, self.b_f, self.b_o]
        self.b = K.concatenate([self.b_z, self.b_f, self.b_o])

    self.regularizers = []
    if self.W_regularizer:
        self.W_regularizer.set_param(self.W)
        self.regularizers.append(self.W_regularizer)
    if self.bias and self.b_regularizer:
        self.b_regularizer.set_param(self.b)
        self.regularizers.append(self.b_regularizer)

    self.constraints = {}
    if self.W_constraint:
        self.constraints[self.W] = self.W_constraint
    if self.bias and self.b_constraint:
        self.constraints[self.b] = self.b_constraint

    if self.initial_weights is not None:
        self.set_weights(self.initial_weights)
        del self.initial_weights
Example #12
Source File: qrnn.py From qrnn with MIT License
def __init__(self, output_dim, window_size=2, return_sequences=False, go_backwards=False,
             stateful=False, unroll=False, subsample_length=1,
             init='uniform', activation='tanh',
             W_regularizer=None, b_regularizer=None,
             W_constraint=None, b_constraint=None,
             dropout=0, weights=None,
             bias=True, input_dim=None, input_length=None, **kwargs):
    self.return_sequences = return_sequences
    self.go_backwards = go_backwards
    self.stateful = stateful
    self.unroll = unroll

    self.output_dim = output_dim
    self.window_size = window_size
    self.subsample = (subsample_length, 1)

    self.bias = bias
    self.init = initializations.get(init)
    self.activation = activations.get(activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)

    self.dropout = dropout
    if self.dropout is not None and 0. < self.dropout < 1.:
        self.uses_learning_phase = True
    self.initial_weights = weights

    self.supports_masking = True
    self.input_spec = [InputSpec(ndim=3)]
    self.input_dim = input_dim
    self.input_length = input_length
    if self.input_dim:
        kwargs['input_shape'] = (self.input_length, self.input_dim)
    super(QRNN, self).__init__(**kwargs)
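For context, a constructor like this follows the Keras 1-era recurrent-layer convention, so the layer slots into a Sequential model like any built-in RNN. A hypothetical usage sketch, assuming the QRNN class above is importable; the layer sizes here are illustrative, not from the project:

from keras.models import Sequential

model = Sequential()
# 64 QRNN units over length-100 sequences of 32 features each;
# input_length/input_dim are folded into input_shape by __init__ above.
model.add(QRNN(64, window_size=2, input_length=100, input_dim=32))
model.compile(optimizer='adam', loss='mse')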
Example #13
Source File: models.py From keras_attention with MIT License
def build(self, input_shape):
    self.input_spec = [InputSpec(ndim=3)]
    assert len(input_shape) == 3

    self.w = self.add_weight(shape=(input_shape[2], 1),
                             name='{}_w'.format(self.name),
                             initializer=self.init)
    self.trainable_weights = [self.w]
    super(AttentionWeightedAverage, self).build(input_shape)
Example #14
Source File: binary_layers.py From nn_playground with MIT License
def build(self, input_shape):
    assert len(input_shape) >= 2
    input_dim = input_shape[1]

    if self.H == 'Glorot':
        self.H = np.float32(np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot H: {}'.format(self.H))

    if self.kernel_lr_multiplier == 'Glorot':
        self.kernel_lr_multiplier = np.float32(1. / np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot learning rate multiplier: {}'.format(self.kernel_lr_multiplier))

    self.kernel_constraint = Clip(-self.H, self.H)
    self.kernel_initializer = initializers.RandomUniform(-self.H, self.H)
    self.kernel = self.add_weight(shape=(input_dim, self.units),
                                  initializer=self.kernel_initializer,
                                  name='kernel',
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)

    if self.use_bias:
        self.lr_multipliers = [self.kernel_lr_multiplier, self.bias_lr_multiplier]
        self.bias = self.add_weight(shape=(self.output_dim,),
                                    initializer=self.bias_initializer,
                                    name='bias',
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)
    else:
        self.lr_multipliers = [self.kernel_lr_multiplier]
        self.bias = None

    self.input_spec = InputSpec(min_ndim=2, axes={-1: input_dim})
    self.built = True
Example #15
Source File: qrnn.py From nn_playground with MIT License
def __init__(self, units, window_size=2, stride=1,
             return_sequences=False, go_backwards=False,
             stateful=False, unroll=False, activation='tanh',
             kernel_initializer='uniform', bias_initializer='zero',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             dropout=0, use_bias=True, input_dim=None, input_length=None,
             **kwargs):
    self.return_sequences = return_sequences
    self.go_backwards = go_backwards
    self.stateful = stateful
    self.unroll = unroll

    self.units = units
    self.window_size = window_size
    self.strides = (stride, 1)

    self.use_bias = use_bias
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)

    self.recurrent_dropout = 0  # not used, added to maintain compatibility with keras.Bidirectional
    self.dropout = dropout
    self.supports_masking = True

    self.input_spec = [InputSpec(ndim=3)]
    self.input_dim = input_dim
    self.input_length = input_length
    if self.input_dim:
        kwargs['input_shape'] = (self.input_length, self.input_dim)
    super(QRNN, self).__init__(**kwargs)
Example #16
Source File: qrnn.py From nn_playground with MIT License
def build(self, input_shape):
    if isinstance(input_shape, list):
        input_shape = input_shape[0]

    batch_size = input_shape[0] if self.stateful else None
    self.input_dim = input_shape[2]
    # Fix the feature axis; batch (unless stateful) and time steps stay unconstrained.
    self.input_spec = InputSpec(shape=(batch_size, None, self.input_dim))
    self.state_spec = InputSpec(shape=(batch_size, self.units))

    self.states = [None]
    if self.stateful:
        self.reset_states()

    kernel_shape = (self.window_size, 1, self.input_dim, self.units * 3)
    self.kernel = self.add_weight(name='kernel',
                                  shape=kernel_shape,
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)
    if self.use_bias:
        self.bias = self.add_weight(name='bias',
                                    shape=(self.units * 3,),
                                    initializer=self.bias_initializer,
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)

    self.built = True
Example #17
Source File: layers.py From nn_playground with MIT License
def build(self, input_shape):
    assert len(input_shape) == 4
    self.input_spec = InputSpec(shape=input_shape)

    if self.data_format == 'channels_first':
        channel_axis = 1
    else:
        channel_axis = 3
    channels = input_shape[channel_axis]

    self.kernel1 = self.add_weight(shape=(channels, channels // self.ratio),
                                   initializer=self.kernel_initializer,
                                   name='kernel1',
                                   regularizer=self.kernel_regularizer,
                                   constraint=self.kernel_constraint)
    if self.use_bias:
        self.bias1 = self.add_weight(shape=(channels // self.ratio,),
                                     initializer=self.bias_initializer,
                                     name='bias1',
                                     regularizer=self.bias_regularizer,
                                     constraint=self.bias_constraint)
    else:
        self.bias1 = None

    self.kernel2 = self.add_weight(shape=(channels // self.ratio, channels),
                                   initializer=self.kernel_initializer,
                                   name='kernel2',
                                   regularizer=self.kernel_regularizer,
                                   constraint=self.kernel_constraint)
    if self.use_bias:
        self.bias2 = self.add_weight(shape=(channels,),
                                     initializer=self.bias_initializer,
                                     name='bias2',
                                     regularizer=self.bias_regularizer,
                                     constraint=self.bias_constraint)
    else:
        self.bias2 = None

    self.built = True
Example #18
Source File: models_pix2pixhd.py From Hands-On-Generative-Adversarial-Networks-with-Keras with MIT License
def __init__(self, padding=(1, 1), **kwargs):
    if type(padding) == int:
        padding = (padding, padding)
    self.padding = padding
    self.input_spec = [InputSpec(ndim=4)]
    super(ReflectionPadding2D, self).__init__(**kwargs)
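The example above shows only the constructor; InputSpec(ndim=4) restricts the layer to batches of images. The matching call for such a layer typically wraps tf.pad in reflect mode. The following is a sketch assuming a TensorFlow backend and channels-last data, not code from the project:

import tensorflow as tf

def call(self, x, mask=None):
    w_pad, h_pad = self.padding
    # Reflect-pad the height and width axes; leave batch and channels untouched.
    return tf.pad(x, [[0, 0], [h_pad, h_pad], [w_pad, w_pad], [0, 0]], 'REFLECT')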
Example #19
Source File: ternary_layers.py From nn_playground with MIT License
def build(self, input_shape):
    assert len(input_shape) >= 2
    input_dim = input_shape[1]

    if self.H == 'Glorot':
        self.H = np.float32(np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot H: {}'.format(self.H))

    if self.kernel_lr_multiplier == 'Glorot':
        self.kernel_lr_multiplier = np.float32(1. / np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot learning rate multiplier: {}'.format(self.kernel_lr_multiplier))

    self.kernel_constraint = Clip(-self.H, self.H)
    self.kernel_initializer = initializers.RandomUniform(-self.H, self.H)
    self.kernel = self.add_weight(shape=(input_dim, self.units),
                                  initializer=self.kernel_initializer,
                                  name='kernel',
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)

    if self.use_bias:
        self.lr_multipliers = [self.kernel_lr_multiplier, self.bias_lr_multiplier]
        self.bias = self.add_weight(shape=(self.output_dim,),
                                    initializer=self.bias_initializer,
                                    name='bias',
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)
    else:
        self.lr_multipliers = [self.kernel_lr_multiplier]
        self.bias = None

    self.input_spec = InputSpec(min_ndim=2, axes={-1: input_dim})
    self.built = True
Example #20
Source File: capslayers.py From deepcaps with MIT License
def __init__(self, **kwargs):
    super(ConvertToCaps, self).__init__(**kwargs)
    # self.input_spec = InputSpec(min_ndim=2)
Example #21
Source File: capslayers.py From deepcaps with MIT License
def __init__(self, **kwargs):
    super(FlattenCaps, self).__init__(**kwargs)
    self.input_spec = InputSpec(min_ndim=4)
Example #22
Source File: capslayers.py From deepcaps with MIT License
def __init__(self, **kwargs):
    super(CapsToScalars, self).__init__(**kwargs)
    self.input_spec = InputSpec(min_ndim=3)
Example #23
Source File: capslayers.py From deepcaps with MIT License
def __init__(self, ch_j, n_j,
             kernel_size=(3, 3),
             strides=(1, 1),
             r_num=1,
             b_alphas=[8, 8, 8],
             padding='same',
             data_format='channels_last',
             dilation_rate=(1, 1),
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             **kwargs):
    super(Conv2DCaps, self).__init__(**kwargs)
    rank = 2
    self.ch_j = ch_j  # Number of capsules in layer J
    self.n_j = n_j    # Number of neurons in a capsule in J
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.r_num = r_num
    self.b_alphas = b_alphas
    self.padding = conv_utils.normalize_padding(padding)
    #self.data_format = conv_utils.normalize_data_format(data_format)
    self.data_format = K.normalize_data_format(data_format)
    self.dilation_rate = (1, 1)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.input_spec = InputSpec(ndim=rank + 3)
Example #24
Source File: quantized_layers.py From QuantizedNeuralNetworks-Keras-Tensorflow with BSD 3-Clause "New" or "Revised" License
def build(self, input_shape):
    assert len(input_shape) >= 2
    input_dim = input_shape[1]

    if self.H == 'Glorot':
        self.H = np.float32(np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot H: {}'.format(self.H))

    if self.kernel_lr_multiplier == 'Glorot':
        self.kernel_lr_multiplier = np.float32(1. / np.sqrt(1.5 / (input_dim + self.units)))
        #print('Glorot learning rate multiplier: {}'.format(self.kernel_lr_multiplier))

    self.kernel_constraint = Clip(-self.H, self.H)
    self.kernel_initializer = initializers.RandomUniform(-self.H, self.H)
    self.kernel = self.add_weight(shape=(input_dim, self.units),
                                  initializer=self.kernel_initializer,
                                  name='kernel',
                                  regularizer=self.kernel_regularizer,
                                  constraint=self.kernel_constraint)

    if self.use_bias:
        self.lr_multipliers = [self.kernel_lr_multiplier, self.bias_lr_multiplier]
        self.bias = self.add_weight(shape=(self.units,),
                                    initializer=self.bias_initializer,
                                    name='bias',
                                    regularizer=self.bias_regularizer,
                                    constraint=self.bias_constraint)
    else:
        self.lr_multipliers = [self.kernel_lr_multiplier]
        self.bias = None

    self.input_spec = InputSpec(min_ndim=2, axes={-1: input_dim})
    self.built = True
Example #25
Source File: abn.py From ddan with MIT License
def build(self, input_shape):
    dim = input_shape[self.axis]
    if dim is None:
        raise ValueError('Axis ' + str(self.axis) + ' of '
                         'input tensor should have a defined dimension '
                         'but the layer received an input with shape ' +
                         str(input_shape) + '.')
    self.input_spec = InputSpec(ndim=len(input_shape),
                                axes={self.axis: dim})
    shape = (dim,)

    if self.scale:
        self.gamma = self.add_weight(shape=shape,
                                     name='gamma',
                                     initializer=self.gamma_initializer,
                                     regularizer=self.gamma_regularizer,
                                     constraint=self.gamma_constraint)
    else:
        self.gamma = None
    if self.center:
        self.beta = self.add_weight(shape=shape,
                                    name='beta',
                                    initializer=self.beta_initializer,
                                    regularizer=self.beta_regularizer,
                                    constraint=self.beta_constraint)
    else:
        self.beta = None
    self.moving_mean = self.add_weight(
        shape=shape,
        name='moving_mean',
        initializer=self.moving_mean_initializer,
        trainable=False)
    self.moving_variance = self.add_weight(
        shape=shape,
        name='moving_variance',
        initializer=self.moving_variance_initializer,
        trainable=False)
    self.built = True
Example #26
Source File: dense.py From Quaternion-Convolutional-Neural-Networks-for-End-to-End-Automatic-Speech-Recognition with GNU General Public License v3.0
def build(self, input_shape):
    assert len(input_shape) == 2
    assert input_shape[-1] % 2 == 0
    input_dim = input_shape[-1] // 4
    data_format = K.image_data_format()
    kernel_shape = (input_dim, self.units)
    init_shape = (input_dim, self.q_units)

    self.kernel_init = qdense_init(init_shape, self.init_criterion)

    self.kernel = self.add_weight(
        shape=kernel_shape,
        initializer=self.kernel_init,
        name='r',
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint
    )

    if self.use_bias:
        self.bias = self.add_weight(
            shape=(self.units,),
            initializer='zeros',
            name='bias',
            regularizer=self.bias_regularizer,
            constraint=self.bias_constraint
        )
    else:
        self.bias = None

    # Quaternion layer: the last input axis must hold 4 * input_dim values,
    # one block per quaternion component.
    self.input_spec = InputSpec(ndim=2, axes={-1: 4 * input_dim})
    self.built = True
Example #27
Source File: attention.py From keras-utility-layer-collection with MIT License
def __init__(self, layer, weight_initializer="glorot_uniform", return_attention=False, **kwargs):
    assert isinstance(layer, RNN)
    self.layer = layer
    self.supports_masking = True
    self.weight_initializer = weight_initializer
    self.return_attention = return_attention
    self._num_constants = None

    super(ExternalAttentionRNNWrapper, self).__init__(layer, **kwargs)

    self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
Example #28
Source File: attention.py From keras-utility-layer-collection with MIT License
def build(self, input_shape):
    self._validate_input_shape(input_shape)

    for i, x in enumerate(input_shape):
        self.input_spec[i] = InputSpec(shape=x)

    if not self.layer.built:
        self.layer.build(input_shape)
        self.layer.built = True

    temporal_input_dim = input_shape[0][-1]
    static_input_dim = input_shape[1][-1]

    if self.layer.return_sequences:
        output_dim = self.layer.compute_output_shape(input_shape[0])[0][-1]
    else:
        output_dim = self.layer.compute_output_shape(input_shape[0])[-1]

    self._W1 = self.add_weight(shape=(static_input_dim, temporal_input_dim),
                               name="{}_W1".format(self.name),
                               initializer=self.weight_initializer)
    self._W2 = self.add_weight(shape=(output_dim, temporal_input_dim),
                               name="{}_W2".format(self.name),
                               initializer=self.weight_initializer)
    self._W3 = self.add_weight(shape=(temporal_input_dim + static_input_dim, temporal_input_dim),
                               name="{}_W3".format(self.name),
                               initializer=self.weight_initializer)
    self._b2 = self.add_weight(shape=(temporal_input_dim,),
                               name="{}_b2".format(self.name),
                               initializer=self.weight_initializer)
    self._b3 = self.add_weight(shape=(temporal_input_dim,),
                               name="{}_b3".format(self.name),
                               initializer=self.weight_initializer)
    self._V = self.add_weight(shape=(temporal_input_dim, 1),
                              name="{}_V".format(self.name),
                              initializer=self.weight_initializer)

    super(ExternalAttentionRNNWrapper, self).build()
Example #29
Source File: dense.py From deep_complex_networks with MIT License
def __init__(self, units,
             activation=None,
             use_bias=True,
             init_criterion='he',
             kernel_initializer='complex',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             seed=None,
             **kwargs):
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(ComplexDense, self).__init__(**kwargs)
    self.units = units
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.init_criterion = init_criterion
    if kernel_initializer in {'complex'}:
        self.kernel_initializer = kernel_initializer
    else:
        self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    if seed is None:
        self.seed = np.random.randint(1, 10e6)
    else:
        self.seed = seed
    self.input_spec = InputSpec(ndim=2)
    self.supports_masking = True
Example #30
Source File: norm.py From deep_complex_networks with MIT License
def build(self, input_shape):
    self.input_spec = InputSpec(ndim=len(input_shape),
                                axes={self.axis: input_shape[self.axis]})
    shape = (input_shape[self.axis],)

    self.gamma = self.add_weight(shape,
                                 initializer=self.gamma_init,
                                 regularizer=self.gamma_regularizer,
                                 name='{}_gamma'.format(self.name))
    self.beta = self.add_weight(shape,
                                initializer=self.beta_init,
                                regularizer=self.beta_regularizer,
                                name='{}_beta'.format(self.name))
    self.built = True