Python tensorflow.keras.regularizers.get() Examples

The following are 20 code examples of tensorflow.keras.regularizers.get(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module tensorflow.keras.regularizers, or try the search function.
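Before the examples, here is a minimal sketch (assuming TensorFlow 2.x) of the kinds of values regularizers.get() accepts; the constructors below rely on exactly this behaviour, so their keyword arguments may be strings, Regularizer instances, or None:

from tensorflow.keras import regularizers

reg_a = regularizers.get('l2')                   # built-in regularizer looked up by its string name
reg_b = regularizers.get(regularizers.l2(1e-4))  # an existing Regularizer instance is returned as-is
reg_c = regularizers.get(None)                   # None passes through unchanged (no regularization)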
Example #1
Source File: custom_activation.py    From Echo with MIT License
def call(self, inputs):
        def brelu(x):
            # get shape of X, we are interested in the last axis, which is constant
            shape = K.int_shape(x)
            # last axis
            dim = shape[-1]
            # half of the last axis (+1 if necessary)
            dim2 = dim // 2
            if dim % 2 != 0:
                dim2 += 1
            # multiplier will be a tensor of alternated +1 and -1
            multiplier = K.ones((dim2,))
            multiplier = K.stack([multiplier, -multiplier], axis=-1)
            if dim % 2 != 0:
                multiplier = multiplier[:-1]
            # adjust multiplier shape to the shape of x
            multiplier = K.reshape(multiplier, tuple(1 for _ in shape[:-1]) + (-1,))
            return multiplier * tf.nn.relu(multiplier * x)

        return Lambda(brelu)(inputs) 
Example #2
Source File: topk_pool.py    From spektral with MIT License
def __init__(self,
                 ratio,
                 return_mask=False,
                 sigmoid_gating=False,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 kernel_constraint=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.ratio = ratio
        self.return_mask = return_mask
        self.sigmoid_gating = sigmoid_gating
        self.gating_op = K.sigmoid if self.sigmoid_gating else K.tanh
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint) 
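Constructors like this only store the resolved objects; they take effect later in build(), when the weights are created. A generic sketch (an illustration, not the spektral source; the kernel shape is an assumption) of how the stored attributes are typically consumed:

def build(self, input_shape):
    # The objects resolved by get() in __init__ are attached to the weight here;
    # Keras then adds the regularizer's penalty to the layer's losses automatically.
    self.kernel = self.add_weight(
        name='kernel',
        shape=(int(input_shape[-1]), 1),   # assumed shape, for illustration only
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint,
    )
    self.built = True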
Example #3
Source File: global_pool.py    From spektral with MIT License
def __init__(self,
                 channels,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.channels = channels
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint) 
Example #4
Source File: diff_pool.py    From spektral with MIT License
def __init__(self,
                 k,
                 channels=None,
                 return_mask=False,
                 activation=None,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 kernel_constraint=None,
                 **kwargs):

        super().__init__(**kwargs)
        self.k = k
        self.channels = channels
        self.return_mask = return_mask
        self.activation = activations.get(activation)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint) 
Example #5
Source File: base.py    From megnet with BSD 3-Clause "New" or "Revised" License
def __init__(self,
                 activation: OptStrOrCallable = None,
                 use_bias: bool = True,
                 kernel_initializer: OptStrOrCallable = 'glorot_uniform',
                 bias_initializer: OptStrOrCallable = 'zeros',
                 kernel_regularizer: OptStrOrCallable = None,
                 bias_regularizer: OptStrOrCallable = None,
                 activity_regularizer: OptStrOrCallable = None,
                 kernel_constraint: OptStrOrCallable = None,
                 bias_constraint: OptStrOrCallable = None,
                 **kwargs):
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        self.activation = activations.get(activation)  # noqa
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        super().__init__(**kwargs) 
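The get() calls in a constructor like this are usually mirrored by serialize() calls in get_config(), so the layer round-trips through model saving and loading. A sketch of that counterpart (an illustration, not taken from megnet):

def get_config(self):
    # Each serialize() call is the inverse of the get() call in __init__.
    config = {
        'activation': activations.serialize(self.activation),
        'use_bias': self.use_bias,
        'kernel_initializer': initializers.serialize(self.kernel_initializer),
        'bias_initializer': initializers.serialize(self.bias_initializer),
        'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
        'bias_regularizer': regularizers.serialize(self.bias_regularizer),
        'activity_regularizer': regularizers.serialize(self.activity_regularizer),
        'kernel_constraint': constraints.serialize(self.kernel_constraint),
        'bias_constraint': constraints.serialize(self.bias_constraint),
    }
    base_config = super().get_config()
    return {**base_config, **config}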
Example #6
Source File: graph_conv.py    From spektral with MIT License
def __init__(self,
                 channels,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):

        super().__init__(activity_regularizer=activity_regularizer, **kwargs)
        self.channels = channels
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.supports_masking = False 
Example #7
Source File: UniRepModel.py    From tape-neurips2019 with MIT License
def convert_sequence_vocab(self, sequence, sequence_lengths):
        PFAM_TO_UNIREP_ENCODED = {encoding: UNIREP_VOCAB.get(aa, 23) for aa, encoding in PFAM_VOCAB.items()}

        def to_uniprot_unirep(seq, seqlens):
            new_seq = np.zeros_like(seq)

            for pfam_encoding, unirep_encoding in PFAM_TO_UNIREP_ENCODED.items():
                new_seq[seq == pfam_encoding] = unirep_encoding

            # add start/stop
            new_seq = np.pad(new_seq, [[0, 0], [1, 1]], mode='constant')
            new_seq[:, 0] = UNIREP_VOCAB['<START>']
            new_seq[np.arange(new_seq.shape[0]), seqlens + 1] = UNIREP_VOCAB['<STOP>']

            return new_seq

        new_sequence = tf.py_func(to_uniprot_unirep, [sequence, sequence_lengths], sequence.dtype)
        new_sequence.set_shape([sequence.shape[0], sequence.shape[1] + 2])

        return new_sequence 
Example #8
Source File: groupnorm.py    From bcnn with MIT License
def __init__(self,
                 groups=4,
                 axis=-1,
                 epsilon=1e-5,
                 center=True,
                 scale=True,
                 beta_initializer="zeros",
                 gamma_initializer="ones",
                 beta_regularizer=None,
                 gamma_regularizer=None,
                 beta_constraint=None,
                 gamma_constraint=None,
                 **kwargs):
        super(GroupNormalization, self).__init__(**kwargs)
        self.supports_masking = True
        self.groups = groups
        self.axis = axis
        self.epsilon = epsilon
        self.center = center
        self.scale = scale
        self.beta_initializer = initializers.get(beta_initializer)
        self.gamma_initializer = initializers.get(gamma_initializer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.beta_constraint = constraints.get(beta_constraint)
        self.gamma_constraint = constraints.get(gamma_constraint) 
Example #9
Source File: custom_activation.py    From Echo with MIT License
def __init__(
        self,
        alpha_initializer="zeros",
        b_initializer="zeros",
        S=1,
        alpha_regularizer=None,
        b_regularizer=None,
        alpha_constraint=None,
        b_constraint=None,
        shared_axes=None,
        **kwargs
    ):
        super(APL, self).__init__(**kwargs)
        self.supports_masking = True
        self.alpha_initializer = initializers.get(alpha_initializer)
        self.alpha_regularizer = regularizers.get(alpha_regularizer)
        self.alpha_constraint = constraints.get(alpha_constraint)
        self.b_initializer = initializers.get(b_initializer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.b_constraint = constraints.get(b_constraint)
        if shared_axes is None:
            self.shared_axes = None
        elif not isinstance(shared_axes, (list, tuple)):
            self.shared_axes = [shared_axes]
        else:
            self.shared_axes = list(shared_axes)
        self.S = S
        self.alpha_arr = []
        self.b_arr = [] 
Example #10
Source File: FRN.py    From TF.Keras-Commonly-used-models with Apache License 2.0
def __init__(self,
                 epsilon=1e-6,
                 beta_initializer='zeros',
                 gamma_initializer='ones',
                 tau_initializers='zeros',
                 beta_regularizer=None,
                 gamma_regularizer=None,
                 tau_regularizer=None,
                 beta_constraint=None,
                 gamma_constraint=None,
                 tau_constraint=None,
                 **kwargs):
        super(FRN, self).__init__(**kwargs)
        self.supports_masking = True
        self.epsilon = epsilon
        self.beta_initializer = initializers.get(beta_initializer)
        self.tau_initializer = initializers.get(tau_initializers)
        self.gamma_initializer = initializers.get(gamma_initializer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.tau_regularizer = regularizers.get(tau_regularizer)
        self.beta_constraint = constraints.get(beta_constraint)
        self.gamma_constraint = constraints.get(gamma_constraint)
        self.tau_constraint = constraints.get(tau_constraint)
        self.tau = None
        self.gamma = None
        self.beta = None
        self.axis = -1 
Example #11
Source File: set2set.py    From megnet with BSD 3-Clause "New" or "Revised" License
def __init__(self,
                 T=3,
                 n_hidden=512,
                 activation=None,
                 activation_lstm='tanh',
                 recurrent_activation='hard_sigmoid',
                 kernel_initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 bias_initializer='zeros',
                 use_bias=True,
                 unit_forget_bias=True,
                 kernel_regularizer=None,
                 recurrent_regularizer=None,
                 bias_regularizer=None,
                 kernel_constraint=None,
                 recurrent_constraint=None,
                 bias_constraint=None,
                 **kwargs):

        super().__init__(**kwargs)
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)

        self.activation_lstm = activations.get(activation_lstm)
        self.recurrent_activation = activations.get(recurrent_activation)
        self.recurrent_initializer = initializers.get(recurrent_initializer)
        self.unit_forget_bias = unit_forget_bias
        self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
        self.recurrent_constraint = constraints.get(recurrent_constraint)
        self.T = T
        self.n_hidden = n_hidden 
Example #12
Source File: group_norm.py    From 3d-brain-tumor-segmentation with Apache License 2.0
def __init__(self,
                 groups=8,
                 axis=-1,
                 epsilon=1e-5,
                 center=True,
                 scale=True,
                 beta_initializer='zeros',
                 gamma_initializer='ones',
                 beta_regularizer=None,
                 gamma_regularizer=None,
                 beta_constraint=None,
                 gamma_constraint=None,
                 **kwargs):
        """ Initializes one group normalization layer.

            References:
                - [Group Normalization](https://arxiv.org/abs/1803.08494)
        """
        super(GroupNormalization, self).__init__(**kwargs)
        self.supports_masking = True
        self.groups = groups
        self.axis = axis
        self.epsilon = epsilon
        self.center = center
        self.scale = scale
        self.beta_initializer = initializers.get(beta_initializer)
        self.gamma_initializer = initializers.get(gamma_initializer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.beta_constraint = constraints.get(beta_constraint)
        self.gamma_constraint = constraints.get(gamma_constraint) 
Example #13
Source File: se_mobilenets.py    From keras-squeeze-excite-network with MIT License
def __init__(self,
                 kernel_size,
                 strides=(1, 1),
                 padding='valid',
                 depth_multiplier=1,
                 data_format=None,
                 activation=None,
                 use_bias=True,
                 depthwise_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 depthwise_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 depthwise_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        super(DepthwiseConv2D, self).__init__(
            filters=None,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            data_format=data_format,
            activation=activation,
            use_bias=use_bias,
            bias_regularizer=bias_regularizer,
            activity_regularizer=activity_regularizer,
            bias_constraint=bias_constraint,
            **kwargs)
        self.depth_multiplier = depth_multiplier
        self.depthwise_initializer = initializers.get(depthwise_initializer)
        self.depthwise_regularizer = regularizers.get(depthwise_regularizer)
        self.depthwise_constraint = constraints.get(depthwise_constraint)
        self.bias_initializer = initializers.get(bias_initializer)
        self.depthwise_kernel = None
        self.bias = None 
Example #14
Source File: conv_mod.py    From StyleGAN2-Tensorflow-2.0 with MIT License
def __init__(self,
                 filters,
                 kernel_size,
                 strides=1,
                 padding='valid',
                 dilation_rate=1,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 demod=True,
                 **kwargs):
        super(Conv2DMod, self).__init__(**kwargs)
        self.filters = filters
        self.rank = 2
        self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
        self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
        self.padding = conv_utils.normalize_padding(padding)
        self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.demod = demod
        self.input_spec = [InputSpec(ndim=4),
                           InputSpec(ndim=2)] 
Example #15
Source File: mincut_pool.py    From spektral with MIT License
def __init__(self,
                 k,
                 mlp_hidden=None,
                 mlp_activation='relu',
                 return_mask=False,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):

        super().__init__(**kwargs)
        self.k = k
        self.mlp_hidden = mlp_hidden if mlp_hidden else []
        self.mlp_activation = mlp_activation
        self.return_mask = return_mask
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint) 
Example #16
Source File: base.py    From spektral with MIT License
def __init__(self,
                 input_dim_1=None,
                 activation=None,
                 **kwargs):

        super(MinkowskiProduct, self).__init__(**kwargs)
        self.input_dim_1 = input_dim_1
        self.activation = activations.get(activation) 
Example #17
Source File: base.py    From spektral with MIT License
def __init__(self,
                 trainable_kernel=False,
                 activation=None,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 kernel_constraint=None,
                 **kwargs):

        super().__init__(**kwargs)
        self.trainable_kernel = trainable_kernel
        self.activation = activations.get(activation)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint) 
Example #18
Source File: keras.py    From spektral with MIT License
def deserialize_kwarg(key, attr):
    if key.endswith('_initializer'):
        return initializers.get(attr)
    if key.endswith('_regularizer'):
        return regularizers.get(attr)
    if key.endswith('_constraint'):
        return constraints.get(attr)
    if key == 'activation':
        return activations.get(attr)
    # any other kwarg is returned unchanged
    return attr 
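A short hypothetical usage of such a helper (the config keys below are made up for illustration, not part of spektral):

config = {
    'kernel_initializer': 'glorot_uniform',
    'kernel_regularizer': 'l2',
    'bias_constraint': None,
    'activation': 'relu',
}
deserialized = {k: deserialize_kwarg(k, v) for k, v in config.items()}
# 'kernel_regularizer' is now a Regularizer instance, 'activation' a callable, and so on.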
Example #19
Source File: conv2d_mpo.py    From TensorNetwork with Apache License 2.0
def __init__(self,
               filters: int,
               kernel_size: Union[int, Tuple[int, int]],
               num_nodes: int,
               bond_dim: int,
               strides: Union[int, Tuple[int, int]] = 1,
               padding: Text = "same",
               data_format: Optional[Text] = "channels_last",
               dilation_rate: Union[int, Tuple[int, int]] = (1, 1),
               activation: Optional[Text] = None,
               use_bias: bool = True,
               kernel_initializer: Text = "glorot_uniform",
               bias_initializer: Text = "zeros",
               kernel_regularizer: Optional[Text] = None,
               bias_regularizer: Optional[Text] = None,
               **kwargs) -> None:
    if num_nodes < 2:
      raise ValueError('Need at least 2 nodes to create MPO')

    if padding not in ('same', 'valid'):
      raise ValueError('Padding must be "same" or "valid"')

    if data_format not in ['channels_first', 'channels_last']:
      raise ValueError('Invalid data_format string provided')

    super(Conv2DMPO, self).__init__(**kwargs)

    self.nodes = []
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.num_nodes = num_nodes
    self.bond_dim = bond_dim
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = padding
    self.data_format = data_format
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate,
                                                    2, 'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer) 
Example #20
Source File: graph_attention.py    From spektral with MIT License
def __init__(self,
                 channels,
                 attn_heads=1,
                 concat_heads=True,
                 dropout_rate=0.5,
                 return_attn_coef=False,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 attn_kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 attn_kernel_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 attn_kernel_constraint=None,
                 **kwargs):
        super().__init__(channels,
                         activation=activation,
                         use_bias=use_bias,
                         kernel_initializer=kernel_initializer,
                         bias_initializer=bias_initializer,
                         kernel_regularizer=kernel_regularizer,
                         bias_regularizer=bias_regularizer,
                         activity_regularizer=activity_regularizer,
                         kernel_constraint=kernel_constraint,
                         bias_constraint=bias_constraint,
                         **kwargs)
        self.attn_heads = attn_heads
        self.concat_heads = concat_heads
        self.dropout_rate = dropout_rate
        self.return_attn_coef = return_attn_coef
        self.attn_kernel_initializer = initializers.get(attn_kernel_initializer)
        self.attn_kernel_regularizer = regularizers.get(attn_kernel_regularizer)
        self.attn_kernel_constraint = constraints.get(attn_kernel_constraint)

        if concat_heads:
            # Output will have shape (..., attention_heads * channels)
            self.output_dim = self.channels * self.attn_heads
        else:
            # Output will have shape (..., channels)
            self.output_dim = self.channels