Python keras.layers.recurrent.SimpleRNN() Examples
The following are 7 code examples of keras.layers.recurrent.SimpleRNN(), collected from open-source projects. The originating project and source file are noted above each example. You may also want to check out the other available functions and classes of the keras.layers.recurrent module.
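Before the examples, here is a minimal, self-contained sketch of the layer's basic usage. The shapes and hyperparameters below are illustrative, and the snippet assumes a Keras version in which keras.layers.recurrent is still importable:

import numpy as np
from keras.models import Sequential
from keras.layers import recurrent

# Toy data: 8 sequences of 3 timesteps with 4 features each.
x = np.random.random((8, 3, 4))
y = np.random.random((8, 5))

model = Sequential()
# 5 recurrent units; by default only the last timestep's output is returned.
model.add(recurrent.SimpleRNN(units=5, input_shape=(3, 4)))
model.compile(loss='mse', optimizer='adam')
model.fit(x, y, epochs=1, batch_size=8, verbose=0)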
Example #1
Source File: recurrent_test.py From DeepLearning_Wavelet-LSTM with MIT License
# Imports needed by this excerpt:
import numpy as np
from keras.models import Sequential
from keras.layers import Masking, recurrent


def test_masking_layer():
    '''This test is based on a previously failing issue:
    https://github.com/keras-team/keras/issues/1567
    '''
    inputs = np.random.random((6, 3, 4))
    targets = np.abs(np.random.random((6, 3, 5)))
    targets /= targets.sum(axis=-1, keepdims=True)

    model = Sequential()
    model.add(Masking(input_shape=(3, 4)))
    model.add(recurrent.SimpleRNN(units=5, return_sequences=True, unroll=False))
    model.compile(loss='categorical_crossentropy', optimizer='adam')
    model.fit(inputs, targets, epochs=1, batch_size=100, verbose=1)

    model = Sequential()
    model.add(Masking(input_shape=(3, 4)))
    model.add(recurrent.SimpleRNN(units=5, return_sequences=True, unroll=True))
    model.compile(loss='categorical_crossentropy', optimizer='adam')
    model.fit(inputs, targets, epochs=1, batch_size=100, verbose=1)
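The two fit calls differ only in the unroll flag: with unroll=True the recurrence is expanded into a static graph (typically faster for short sequences, at the cost of memory), while unroll=False uses a symbolic loop. The test verifies that masking trains without error in both modes.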
Example #2
Source File: rnn.py From neuroevolution with MIT License
# Imports needed by this excerpt:
from keras.models import Sequential
from keras.layers import Dense, SimpleRNN


def create_rnn():
    """Create a recurrent neural network to compute a control policy.

    Reference:
    Koutnik, Jan, Jurgen Schmidhuber, and Faustino Gomez. "Evolving deep
    unsupervised convolutional networks for vision-based reinforcement
    learning." Proceedings of the 2014 conference on Genetic and
    evolutionary computation. ACM, 2014.
    """
    model = Sequential()
    # output_dim/input_dim are the legacy Keras 1 argument names (units in Keras 2).
    model.add(SimpleRNN(output_dim=3, stateful=True, batch_input_shape=(1, 1, 3)))
    model.add(Dense(input_dim=3, output_dim=3))
    model.compile(loss='mse', optimizer='rmsprop')
    return model
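Because the layer is stateful with batch_input_shape=(1, 1, 3), the model is meant to be stepped one observation at a time, carrying its hidden state between calls. A minimal usage sketch under that assumption (observation values are illustrative):

import numpy as np

model = create_rnn()
for t in range(5):
    obs = np.random.random((1, 1, 3))   # one timestep of a 3-dim observation
    action = model.predict(obs)         # hidden state persists across calls
model.reset_states()                    # clear state before a new episode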
Example #3
Source File: model_all_stacked.py From neural_complete with MIT License
# Imports needed by this excerpt:
from keras.layers import Input, RepeatVector, SimpleRNN
from keras.models import Model
from keras.optimizers import Adam


def construct_model(maxlen, input_dimension, output_dimension, lstm_vector_output_dim):
    """Three words glued together."""
    input = Input(shape=(maxlen, input_dimension), name='input')
    # lstm_encode = LSTM(lstm_vector_output_dim)(input)
    lstm_encode = SimpleRNN(lstm_vector_output_dim, activation='relu')(input)
    encoded_copied = RepeatVector(n=maxlen)(lstm_encode)
    # lstm_decode = LSTM(output_dim=output_dimension, return_sequences=True,
    #                    activation='softmax')(encoded_copied)
    lstm_decode = SimpleRNN(output_dim=output_dimension, return_sequences=True,
                            activation='softmax')(encoded_copied)
    encoder = Model(input, lstm_decode)
    adam = Adam()
    encoder.compile(loss='categorical_crossentropy', optimizer=adam)
    return encoder
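This is a sequence-to-sequence autoencoder pattern: the first SimpleRNN compresses the input sequence into a single vector, RepeatVector tiles that vector maxlen times, and the second SimpleRNN decodes the tiled vector back into a sequence. A hypothetical invocation, assuming a Keras version that still accepts the legacy output_dim argument name (all shapes illustrative):

import numpy as np

model = construct_model(maxlen=3, input_dimension=40,
                        output_dimension=40, lstm_vector_output_dim=32)
x = np.random.random((16, 3, 40))
y = x / x.sum(axis=-1, keepdims=True)  # normalize targets for categorical_crossentropy
model.fit(x, y, epochs=1, verbose=0)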
Example #4
Source File: test_recurrent.py From CAPTCHA-breaking with MIT License
def test_simple(self):
    # _runner is a shared test harness defined elsewhere in this test module.
    _runner(recurrent.SimpleRNN)
Example #5
Source File: model.py From neural_complete with MIT License
# Imports needed by this excerpt:
from keras.layers import Dense, Input, RepeatVector, SimpleRNN, TimeDistributed
from keras.models import Model
from keras.optimizers import Adam


def construct_model(maxlen, input_dimension, output_dimension, lstm_vector_output_dim):
    """Three words glued together."""
    input = Input(shape=(maxlen, input_dimension), name='input')
    # lstm_encode = LSTM(lstm_vector_output_dim)(input)
    lstm_encode = SimpleRNN(lstm_vector_output_dim, activation='sigmoid')(input)
    encoded_copied = RepeatVector(n=maxlen)(lstm_encode)
    # lstm_decode = LSTM(output_dim=output_dimension, return_sequences=True,
    #                    activation='softmax')(encoded_copied)
    lstm_decode = SimpleRNN(output_dim=output_dimension, return_sequences=True,
                            activation='softmax')(encoded_copied)
    decoded = TimeDistributed(Dense(output_dimension, activation='softmax'))(lstm_decode)
    encoder_decoder = Model(input, decoded)
    adam = Adam()
    encoder_decoder.compile(loss='categorical_crossentropy', optimizer=adam)
    return encoder_decoder
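Unlike Example #3 above, this variant adds a TimeDistributed(Dense(...)) projection after the decoder, so the per-timestep softmax is produced by a dense layer applied at every timestep rather than by the recurrent layer's own activation.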
Example #6
Source File: recurrent.py From neuralforecast with MIT License
# Method excerpt from a custom Keras 1-style recurrent layer (a SimpleRNN variant);
# InputSpec, the backend K, and the initializers are provided by the surrounding module.
def build(self, input_shape):
    self.input_spec = [InputSpec(shape=input_shape)]
    if self.stateful:
        self.reset_states()
    else:
        # initial states: all-zero tensor of shape (output_dim)
        self.states = [None]
    input_dim = input_shape[2]
    self.input_dim = input_dim

    self.W = self.init((input_dim, self.output_dim),
                       name='{}_W'.format(self.name))
    # Only change in build compared to SimpleRNN:
    # U is of shape (inner_input_dim, output_dim) now.
    self.U = self.inner_init((self.inner_input_dim, self.output_dim),
                             name='{}_U'.format(self.name))
    self.b = K.zeros((self.output_dim,), name='{}_b'.format(self.name))

    self.regularizers = []
    if self.W_regularizer:
        self.W_regularizer.set_param(self.W)
        self.regularizers.append(self.W_regularizer)
    if self.U_regularizer:
        self.U_regularizer.set_param(self.U)
        self.regularizers.append(self.U_regularizer)
    if self.b_regularizer:
        self.b_regularizer.set_param(self.b)
        self.regularizers.append(self.b_regularizer)

    self.trainable_weights = [self.W, self.U, self.b]

    if self.initial_weights is not None:
        self.set_weights(self.initial_weights)
        del self.initial_weights
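In the Keras 1 layer lifecycle, build() is called once the input shape is known; it creates the weight variables (W for input-to-hidden, U for hidden-to-hidden, b for bias), registers any regularizers against them, and loads initial weights if supplied. As the inline comment notes, the only deviation from the stock SimpleRNN is that U has shape (inner_input_dim, output_dim) rather than (output_dim, output_dim).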
Example #7
Source File: recurrent_test.py From DeepLearning_Wavelet-LSTM with MIT License
# Imports needed by this excerpt (keras_test comes from the project's test utilities):
import pytest
from keras.layers import recurrent


def rnn_test(f):
    """All the recurrent layers share the same interface,
    so we can run through them with a single function.
    """
    f = keras_test(f)
    return pytest.mark.parametrize('layer_class', [
        recurrent.SimpleRNN,
        recurrent.GRU,
        recurrent.LSTM
    ])(f)
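For context, a sketch of how such a decorator is applied: pytest invokes the test once per layer class, so a single body exercises SimpleRNN, GRU, and LSTM. The test name and body here are hypothetical:

@rnn_test
def test_return_sequences(layer_class):
    # Runs three times: layer_class is SimpleRNN, then GRU, then LSTM.
    layer = layer_class(units=4, return_sequences=True)
    assert layer.units == 4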