Python keras.initializers.glorot_normal() Examples

The following are 17 code examples of keras.initializers.glorot_normal(), collected from open-source projects. The source file and project for each example are noted above it. You may also want to check out the other functions and classes available in the keras.initializers module.
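For context, glorot_normal (Xavier normal initialization) draws weights from a zero-mean truncated normal distribution with stddev = sqrt(2 / (fan_in + fan_out)), where fan_in and fan_out are the input and output unit counts of the weight tensor. Below is a minimal usage sketch; the Keras 2.x API is assumed and the layer sizes are purely illustrative.

from keras import initializers
from keras.layers import Dense
from keras.models import Sequential

# Build the initializer explicitly; a seed makes the weight draw reproducible.
init = initializers.glorot_normal(seed=42)

model = Sequential([
    # fan_in=20, fan_out=64, so stddev = sqrt(2 / 84) ≈ 0.154
    Dense(64, input_shape=(20,), kernel_initializer=init),
    # The string alias "glorot_normal" is equivalent (with seed=None).
    Dense(1, kernel_initializer="glorot_normal"),
])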
Example #1
Source File: layers.py    From deepar with MIT License
def build(self, input_shape):
        n_weight_rows = input_shape[2]
        self.kernel_1 = self.add_weight(name='kernel_1',
                                        shape=(n_weight_rows, self.output_dim),
                                        initializer=glorot_normal(),
                                        trainable=True)
        self.kernel_2 = self.add_weight(name='kernel_2',
                                        shape=(n_weight_rows, self.output_dim),
                                        initializer=glorot_normal(),
                                        trainable=True)
        self.bias_1 = self.add_weight(name='bias_1',
                                      shape=(self.output_dim,),
                                      initializer=glorot_normal(),
                                      trainable=True)
        self.bias_2 = self.add_weight(name='bias_2',
                                      shape=(self.output_dim,),
                                      initializer=glorot_normal(),
                                      trainable=True)
        super(GaussianLayer, self).build(input_shape) 
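The two parallel kernel/bias pairs here presumably feed the two outputs of DeepAR's Gaussian likelihood layer (mean and standard deviation). Note also that the bias vectors are initialized with glorot_normal(); Keras layers default to zero-initialized biases, so this is a choice made in the source rather than a Keras requirement.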
Example #2
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def _build_fn_glorot_normal_1(input_shape):  # `"glorot_normal"`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer="glorot_normal"),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
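This build function passes the initializer as the string "glorot_normal"; Example #12 below passes a glorot_normal() instance instead. The hyperparameter_hunter tests apparently exercise both spellings to confirm they are recorded as the same hyperparameter.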


#################### `orthogonal` - Excluding default (`Initializer`) #################### 
Example #3
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def test_in_categorical_4(self, old_opt):  # `Categorical(["glorot_normal", O(gain=1)])`
        assert in_similar_experiment_ids(old_opt, self.opt_g_4) 
Example #4
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def test_in_categorical_3(self, old_opt):  # `Categorical(["glorot_normal", o(gain=1)])`
        assert in_similar_experiment_ids(old_opt, self.opt_g_3) 
Example #5
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def test_in_categorical_2(self, old_opt):  # `Categorical([glorot_normal(), O()])`
        assert in_similar_experiment_ids(old_opt, self.opt_g_2) 
Example #6
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def test_in_categorical_0(self, old_opt):  # `Categorical(["glorot_normal", "o"])`
        assert in_similar_experiment_ids(old_opt, self.opt_g_0) 
Example #7
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def test_in_space_inclusive_callable(self, old_opt, new_opt):
        assert in_similar_experiment_ids(old_opt, new_opt)

    ##################################################
    # `glorot_normal` (`VarianceScaling`)
    ################################################## 
Example #8
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def _build_fn_categorical_4(input_shape):  # `Categorical(["glorot_normal", Orthogonal(gain=1)])`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=Categorical(["glorot_normal", Orthogonal(gain=1)])),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model 
Example #9
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def _build_fn_categorical_3(input_shape):  # `Categorical(["glorot_normal", orthogonal(gain=1)])`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=Categorical(["glorot_normal", orthogonal(gain=1)])),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model 
Example #10
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def _build_fn_categorical_2(input_shape):  # `Categorical([glorot_normal(), Orthogonal()])`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=Categorical([glorot_normal(), Orthogonal()])),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model 
Example #11
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def _build_fn_categorical_0(input_shape):  # `Categorical(["glorot_normal", "orthogonal"])`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=Categorical(["glorot_normal", "orthogonal"])),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model 
Example #12
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def _build_fn_glorot_normal_0(input_shape):  # `glorot_normal()`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=glorot_normal()),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model 
Example #13
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def run_initialization_matching_optimization_0(build_fn):
    optimizer = DummyOptPro(iterations=1)
    optimizer.forge_experiment(
        model_initializer=KerasClassifier,
        model_init_params=dict(build_fn=build_fn),
        model_extra_params=dict(epochs=1, batch_size=128, verbose=0),
    )
    optimizer.go()
    return optimizer


#################### `glorot_normal` (`VarianceScaling`) #################### 
Example #14
Source File: test_keras.py    From hyperparameter_hunter with MIT License
def _build_fn_regressor(input_shape):
    model = Sequential(
        [
            Dense(100, activation="relu", input_shape=input_shape),
            Dense(Integer(40, 60), activation="relu", kernel_initializer="glorot_normal"),
            Dropout(Real(0.2, 0.7)),
            Dense(1, activation=Categorical(["relu", "sigmoid"]), kernel_initializer="orthogonal"),
        ]
    )
    model.compile(
        optimizer=Categorical(["adam", "rmsprop"]),
        loss="mean_absolute_error",
        metrics=["mean_absolute_error"],
    )
    return model 
Example #15
Source File: submission_v50.py    From Quora with MIT License
def get_model(embed_weights):
    input_layer = Input(shape=(MAX_LEN, ), name='input')
    # 1. embedding layer
    # get embedding weights
    print('load pre-trained embedding weights ......')
    input_dim = embed_weights.shape[0]
    output_dim = embed_weights.shape[1]
    x = Embedding(
        input_dim=input_dim,
        output_dim=output_dim,
        weights=[embed_weights],
        trainable=False,
        name='embedding'
    )(input_layer)
    # clean up
    del embed_weights, input_dim, output_dim
    gc.collect()
    # 2. dropout
    x = SpatialDropout1D(rate=SPATIAL_DROPOUT)(x)
    # 3. bidirectional lstm
    x = Bidirectional(
        layer=CuDNNLSTM(RNN_UNITS, return_sequences=True,
                        kernel_initializer=glorot_normal(seed=1029),
                        recurrent_initializer=orthogonal(gain=1.0, seed=1029)),
        name='bidirectional_lstm')(x)
    # 4. capsule layer
    capsul = Capsule(num_capsule=10, dim_capsule=10, routings=4, share_weights=True)(x) # noqa
    capsul = Flatten()(capsul)
    capsul = DropConnect(Dense(32, activation="relu"), prob=0.01)(capsul)

    # 5. attention layer
    atten = Attention(step_dim=MAX_LEN, name='attention')(x)
    atten = DropConnect(Dense(16, activation="relu"), prob=0.05)(atten)
    x = Concatenate(axis=-1)([capsul, atten])

    # 6. output (sigmoid)
    output_layer = Dense(units=1, activation='sigmoid', name='output')(x)
    model = Model(inputs=input_layer, outputs=output_layer)
    # compile model
    model.compile(loss='binary_crossentropy', optimizer='adam')
    return model 
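Both glorot_normal and orthogonal are seeded (seed=1029) here, which makes the kernel and recurrent weight draws of the CuDNNLSTM reproducible across runs.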
Example #16
Source File: initializers_test.py    From DeepLearning_Wavelet-LSTM with MIT License
def test_glorot_normal(tensor_shape):
    fan_in, fan_out = initializers._compute_fans(tensor_shape)
    scale = np.sqrt(2. / (fan_in + fan_out))
    _runner(initializers.glorot_normal(), tensor_shape,
            target_mean=0., target_std=None, target_max=2 * scale) 
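The bound follows from the Glorot formula: the samples come from a truncated normal with stddev = sqrt(2 / (fan_in + fan_out)), and Keras truncates that distribution at two standard deviations, hence target_max=2 * scale. target_std is left as None because truncation pulls the empirical standard deviation slightly below scale.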
Example #17
Source File: keras_utils.py    From Benchmarks with MIT License
def build_initializer(type, kerasDefaults, seed=None, constant=0.):
    """ Set the initializer to the appropriate Keras initializer function
        based on the input string and learning rate. Other required values
        are set to the Keras default values

        Parameters
        ----------
        type : string
            String to choose the initializer

            Options recognized: 'constant', 'uniform', 'normal',
            'glorot_uniform', 'lecun_uniform', 'he_normal'

            See the Keras documentation for a full description of the options

        kerasDefaults : dict
            Dictionary of default parameter values to ensure consistency between frameworks

        seed : integer
            Random number seed

        constant : float
            Constant value (for the constant initializer only)

        Returns
        -------
        The appropriate Keras initializer function
    """

    if type == 'constant':
        return initializers.Constant(value=constant)

    elif type == 'uniform':
        return initializers.RandomUniform(minval=kerasDefaults['minval_uniform'],
                                          maxval=kerasDefaults['maxval_uniform'],
                                          seed=seed)

    elif type == 'normal':
        return initializers.RandomNormal(mean=kerasDefaults['mean_normal'],
                                         stddev=kerasDefaults['stddev_normal'],
                                         seed=seed)

# Not generally available
#    elif type == 'glorot_normal':
#        return initializers.glorot_normal(seed=seed)

    elif type == 'glorot_uniform':
        return initializers.glorot_uniform(seed=seed)

    elif type == 'lecun_uniform':
        return initializers.lecun_uniform(seed=seed)

    elif type == 'he_normal':
        return initializers.he_normal(seed=seed) 
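A hedged usage sketch of build_initializer follows; the kerasDefaults dictionary shown here is hypothetical and lists only the keys the function actually reads.

# Hypothetical defaults; real values come from the Benchmarks framework config.
kerasDefaults = {
    'minval_uniform': -0.05, 'maxval_uniform': 0.05,
    'mean_normal': 0.0, 'stddev_normal': 0.05,
}

he_init = build_initializer('he_normal', kerasDefaults, seed=2017)
uniform_init = build_initializer('uniform', kerasDefaults, seed=2017)
const_init = build_initializer('constant', kerasDefaults, constant=0.1)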