Python keras.layers.core.Masking() Examples
The following are 6 code examples of keras.layers.core.Masking(), collected from open-source projects. Each example notes its original project and source file. You may also want to check out the other functions and classes of the keras.layers.core module.
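Before the project examples, a minimal sketch of what the layer does may help. This is not taken from any project below and assumes the Keras 2 API: timesteps whose features all equal mask_value are skipped by downstream mask-aware layers such as LSTM.

    # Minimal sketch (assumes Keras 2). All-zero timesteps are treated as
    # padding and ignored by the LSTM that follows the Masking layer.
    import numpy as np
    from keras.models import Sequential
    from keras.layers import Masking, LSTM

    model = Sequential()
    model.add(Masking(mask_value=0.0, input_shape=(4, 2)))  # mask all-zero steps
    model.add(LSTM(3))
    model.compile(loss='mse', optimizer='adam')

    x = np.array([[[1., 2.], [3., 4.], [0., 0.], [0., 0.]]])  # last two steps are padding
    y = np.zeros((1, 3))
    model.fit(x, y, epochs=1, verbose=0)  # the padded steps do not affect the LSTM state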
Example #1
Source File: recurrent_test.py, from DeepLearning_Wavelet-LSTM (MIT License)

def test_masking_layer():
    '''This test is based on a previously failing issue here:
    https://github.com/keras-team/keras/issues/1567
    '''
    inputs = np.random.random((6, 3, 4))
    targets = np.abs(np.random.random((6, 3, 5)))
    targets /= targets.sum(axis=-1, keepdims=True)

    model = Sequential()
    model.add(Masking(input_shape=(3, 4)))
    model.add(recurrent.SimpleRNN(units=5, return_sequences=True, unroll=False))
    model.compile(loss='categorical_crossentropy', optimizer='adam')
    model.fit(inputs, targets, epochs=1, batch_size=100, verbose=1)

    model = Sequential()
    model.add(Masking(input_shape=(3, 4)))
    model.add(recurrent.SimpleRNN(units=5, return_sequences=True, unroll=True))
    model.compile(loss='categorical_crossentropy', optimizer='adam')
    model.fit(inputs, targets, epochs=1, batch_size=100, verbose=1)
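Both halves of this test build the same model; the only difference is unroll=True in the second, which unrolls the recurrence into a static graph. Unrolling tends to be faster for short sequences but more memory-hungry, and the test verifies that masking behaves consistently in both modes.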
Example #2
Source File: recurrent_test.py, from DeepLearning_Wavelet-LSTM (MIT License)

def test_specify_state_with_masking(layer_class):
    '''This test is based on a previously failing issue here:
    https://github.com/keras-team/keras/issues/1567
    '''
    num_states = 2 if layer_class is recurrent.LSTM else 1

    inputs = Input((timesteps, embedding_dim))
    _ = Masking()(inputs)
    initial_state = [Input((units,)) for _ in range(num_states)]
    output = layer_class(units)(inputs, initial_state=initial_state)

    model = Model([inputs] + initial_state, output)
    model.compile(loss='categorical_crossentropy', optimizer='adam')

    inputs = np.random.random((num_samples, timesteps, embedding_dim))
    initial_state = [np.random.random((num_samples, units))
                     for _ in range(num_states)]
    targets = np.random.random((num_samples, units))
    model.fit([inputs] + initial_state, targets)
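The snippet references timesteps, embedding_dim, units, and num_samples without defining them; in the original recurrent_test.py they are module-level test constants, and layer_class is supplied by a decorator that parametrizes the test over the RNN layer classes. A hedged sketch of fixtures that would make the snippet self-contained (the values and imports below are assumptions, not the originals):

    # Assumed fixtures and imports; the original test file defines its own
    # module-level constants and parametrizes layer_class externally.
    import numpy as np
    from keras.layers import Input, Masking, recurrent
    from keras.models import Model

    num_samples, timesteps, embedding_dim, units = 2, 5, 4, 3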
Example #3
Source File: model_zoo.py, from visual_turing_test-tutorial (MIT License)

def textual_embedding(self, language_model, mask_zero):
    """
    Note:
    * mask_zero only makes sense if embedding is learnt
    """
    if self._config.textual_embedding_dim > 0:
        print('Textual Embedding is on')
        language_model.add(Embedding(
            self._config.input_dim,
            self._config.textual_embedding_dim,
            mask_zero=mask_zero))
    else:
        print('Textual Embedding is off')
        language_model.add(Reshape(
            input_shape=(self._config.max_input_time_steps,
                         self._config.input_dim),
            dims=(self._config.max_input_time_steps,
                  self._config.input_dim)))
    if mask_zero:
        language_model.add(Masking(0))
    return language_model
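Note that Reshape(dims=...) is the old Keras 0.x signature, which dates this snippet. The docstring's point about mask_zero carries over to current Keras: Embedding(mask_zero=True) treats index 0 as padding and emits a mask for downstream layers. A hedged sketch of that pairing (assumes Keras 2; the dimensions are made up):

    from keras.models import Sequential
    from keras.layers import Embedding, LSTM

    model = Sequential()
    model.add(Embedding(input_dim=1000, output_dim=16, mask_zero=True))  # 0 = padding index
    model.add(LSTM(8))  # consumes the mask produced by the Embedding layer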
Example #4
Source File: model_zoo.py, from visual_turing_test-tutorial (MIT License)

def textual_embedding_fixed_length(self, language_model, mask_zero):
    """
    In contrast to textual_embedding, it produces a fixed-length output.
    """
    if self._config.textual_embedding_dim > 0:
        print('Textual Embedding with fixed length is on')
        language_model.add(Embedding(
            self._config.input_dim,
            self._config.textual_embedding_dim,
            input_length=self._config.max_input_time_steps,
            mask_zero=mask_zero))
    else:
        print('Textual Embedding with fixed length is off')
        language_model.add(Reshape(
            input_shape=(self._config.max_input_time_steps,
                         self._config.input_dim),
            dims=(self._config.max_input_time_steps,
                  self._config.input_dim)))
    if mask_zero:
        language_model.add(Masking(0))
    return language_model
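The only functional difference from textual_embedding in Example #3 is the input_length argument, which pins the time dimension so that downstream layers needing a static shape (such as Flatten) can infer it. A brief hedged illustration (the dimensions are made up):

    from keras.layers import Embedding

    # With input_length set, the output shape is fully known: (None, 20, 16)
    emb = Embedding(input_dim=1000, output_dim=16, input_length=20, mask_zero=True)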
Example #5
Source File: test_core.py, from CAPTCHA-breaking (MIT License)

def test_non_zero(self):
    """Test masking with a non-zero mask value"""
    layer = core.Masking(5)
    func = theano.function([layer.input], layer.get_output_mask())
    self.assertTrue(np.all(
        # Get the mask for this input; a timestep is masked only when
        # all of its values equal 5.
        func(np.array(
            [[[1, 1], [2, 1], [3, 1], [5, 5]],
             [[1, 5], [5, 0], [0, 0], [0, 0]]], dtype=np.int32)) ==
        # This is the expected output mask, one dimension less
        np.array([[1, 1, 1, 0], [1, 1, 1, 1]])))
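This test exercises the theano-era get_output_mask() API from early Keras. A hedged modern equivalent (assuming Keras 2, where Masking exposes compute_mask):

    import numpy as np
    from keras import backend as K
    from keras.layers import Masking

    x = np.array([[[1, 1], [2, 1], [3, 1], [5, 5]],
                  [[1, 5], [5, 0], [0, 0], [0, 0]]], dtype='float32')
    mask = Masking(mask_value=5).compute_mask(K.constant(x))
    print(K.eval(mask))
    # [[ True  True  True False]
    #  [ True  True  True  True]]  (only the all-5 timestep is masked)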
Example #6
Source File: test_core.py, from CAPTCHA-breaking (MIT License)

def test_sequences(self):
    """Test masking sequences with zeroes as padding"""
    # Integer inputs, one per timestep, as with embeddings
    layer = core.Masking()
    func = theano.function([layer.input], layer.get_output_mask())
    self.assertTrue(np.all(
        # Get the mask for this input
        func(np.array(
            [[[1], [2], [3], [0]],
             [[0], [4], [5], [0]]], dtype=np.int32)) ==
        # This is the expected output mask, one dimension less
        np.array([[1, 1, 1, 0], [0, 1, 1, 0]])))
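In practice, the zero padding this test checks usually comes from keras.preprocessing.sequence.pad_sequences, which Masking() then turns into masked timesteps. A small hedged sketch:

    from keras.preprocessing.sequence import pad_sequences

    padded = pad_sequences([[1, 2, 3], [4, 5]], maxlen=4, padding='post')
    # array([[1, 2, 3, 0],
    #        [4, 5, 0, 0]], dtype=int32)  (the zeros are what Masking() masks)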