Python keras.initializers.orthogonal() Examples
The following are code examples of keras.initializers.orthogonal(), collected from open-source projects; the original project and source file are noted above each example. You may also want to check out all available functions and classes of the keras.initializers module.
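For orientation before the examples: orthogonal() returns an initializer that fills a weight matrix with a random orthogonal matrix scaled by gain, and it is typically passed as a layer's kernel_initializer or recurrent_initializer. Below is a minimal, hypothetical usage sketch; the layer sizes, input shape, and seed are arbitrary illustration values, not taken from the examples that follow.

# Minimal illustrative sketch (hypothetical values), not from the examples below.
from keras.initializers import orthogonal
from keras.layers import Dense
from keras.models import Sequential

model = Sequential([
    # orthogonal(gain=..., seed=...) returns an Orthogonal initializer instance
    Dense(64, input_shape=(20,), kernel_initializer=orthogonal(gain=1.0, seed=42)),
    Dense(1, activation="sigmoid"),
])
model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])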
Example #1
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_glorot_normal_1(input_shape):  # `"glorot_normal"`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer="glorot_normal"),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model


#################### `orthogonal` - Excluding default (`Initializer`) ####################
Example #2
Source File: test_keras.py From hyperparameter_hunter with MIT License
def test_in_custom_arg_callable(self, old_opt, new_opt):
    assert in_similar_experiment_ids(old_opt, new_opt)


##################################################
# `orthogonal` - Including default (`Initializer`)
##################################################
Example #3
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_categorical_1(input_shape):  # `Categorical([glorot_normal(), orthogonal()])`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=Categorical([glorot_normal(), orthogonal()])),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
Example #4
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_categorical_0(input_shape):  # `Categorical(["glorot_normal", "orthogonal"])`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=Categorical(["glorot_normal", "orthogonal"])),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
Example #5
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_orthogonal_i_5(input_shape):  # `orthogonal(gain=Real(0.6, 1.6))`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=orthogonal(gain=Real(0.6, 1.6))),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
Example #6
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_orthogonal_i_3(input_shape):  # `orthogonal(gain=1.0)`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=orthogonal(gain=1.0)),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
Example #7
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_orthogonal_i_1(input_shape):  # `orthogonal()`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=orthogonal()),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
Example #8
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_orthogonal_e_3(input_shape):  # `Orthogonal(gain=0.5)`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=Orthogonal(gain=0.5)),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model


#################### `orthogonal` - Including default (`Initializer`) ####################
Example #9
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_orthogonal_e_2(input_shape):  # `orthogonal(gain=0.5)`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=orthogonal(gain=0.5)),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
Example #10
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_orthogonal_e_0(input_shape):  # `orthogonal(gain=Real(0.3, 0.9))`
    model = Sequential(
        [
            Dense(Integer(50, 100), input_shape=input_shape),
            Dense(1, kernel_initializer=orthogonal(gain=Real(0.3, 0.9))),
        ]
    )
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
    return model
Example #11
Source File: initializers_test.py From DeepLearning_Wavelet-LSTM with MIT License
def test_orthogonal(tensor_shape):
    _runner(initializers.orthogonal(), tensor_shape, target_mean=0.)
Example #12
Source File: test_keras.py From hyperparameter_hunter with MIT License
def _build_fn_regressor(input_shape):
    model = Sequential(
        [
            Dense(100, activation="relu", input_shape=input_shape),
            Dense(Integer(40, 60), activation="relu", kernel_initializer="glorot_normal"),
            Dropout(Real(0.2, 0.7)),
            Dense(1, activation=Categorical(["relu", "sigmoid"]), kernel_initializer="orthogonal"),
        ]
    )
    model.compile(
        optimizer=Categorical(["adam", "rmsprop"]),
        loss="mean_absolute_error",
        metrics=["mean_absolute_error"],
    )
    return model
Example #13
Source File: submission_v50.py From Quora with MIT License
def get_model(embed_weights):
    input_layer = Input(shape=(MAX_LEN, ), name='input')
    # 1. embedding layer
    # get embedding weights
    print('load pre-trained embedding weights ......')
    input_dim = embed_weights.shape[0]
    output_dim = embed_weights.shape[1]
    x = Embedding(
        input_dim=input_dim,
        output_dim=output_dim,
        weights=[embed_weights],
        trainable=False,
        name='embedding'
    )(input_layer)
    # clean up
    del embed_weights, input_dim, output_dim
    gc.collect()
    # 2. dropout
    x = SpatialDropout1D(rate=SPATIAL_DROPOUT)(x)
    # 3. bidirectional lstm
    x = Bidirectional(
        layer=CuDNNLSTM(RNN_UNITS, return_sequences=True,
                        kernel_initializer=glorot_normal(seed=1029),
                        recurrent_initializer=orthogonal(gain=1.0, seed=1029)),
        name='bidirectional_lstm')(x)
    # 4. capsule layer
    capsul = Capsule(num_capsule=10, dim_capsule=10,
                     routings=4, share_weights=True)(x)  # noqa
    capsul = Flatten()(capsul)
    capsul = DropConnect(Dense(32, activation="relu"), prob=0.01)(capsul)
    # 5. attention layer
    atten = Attention(step_dim=MAX_LEN, name='attention')(x)
    atten = DropConnect(Dense(16, activation="relu"), prob=0.05)(atten)
    x = Concatenate(axis=-1)([capsul, atten])
    # 6. output (sigmoid)
    output_layer = Dense(units=1, activation='sigmoid', name='output')(x)
    model = Model(inputs=input_layer, outputs=output_layer)
    # compile model
    model.compile(loss='binary_crossentropy', optimizer='adam')
    return model