Python blocks.bricks.Identity() Examples
The following are 5 code examples of blocks.bricks.Identity().
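Before the project examples, here is a minimal, self-contained sketch of the typical use of Identity: it is a pass-through activation brick, so the MLP layer it is attached to stays purely affine. The dimensions and variable names here are illustrative, not taken from any of the projects below.

    from theano import tensor
    from blocks.bricks import MLP, Tanh, Identity
    from blocks.initialization import IsotropicGaussian, Constant

    # Two-layer MLP: Tanh on the hidden layer, Identity on the output
    # layer, so the final transformation applies no nonlinearity.
    mlp = MLP(activations=[Tanh(), Identity()], dims=[784, 100, 10],
              weights_init=IsotropicGaussian(0.01), biases_init=Constant(0))
    mlp.initialize()

    x = tensor.matrix('features')
    y = mlp.apply(x)  # symbolic Theano output of the affine last layer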
Example #1
Source File: regression.py From Diffusion-Probabilistic-Models with MIT License
def __init__(self, n_layers_conv, n_layers_dense_lower, n_layers_dense_upper,
             n_hidden_conv, n_hidden_dense_lower, n_hidden_dense_lower_output,
             n_hidden_dense_upper, spatial_width, n_colors, n_scales,
             n_temporal_basis):
    """
    The multilayer perceptron, that provides temporal weighting
    coefficients for mu and sigma images. This consists of a lower
    segment with a convolutional MLP, and optionally with a dense MLP
    in parallel. The upper segment then consists of a per-pixel dense
    MLP (convolutional MLP with 1x1 kernel).
    """
    super(MLP_conv_dense, self).__init__()

    self.n_colors = n_colors
    self.spatial_width = spatial_width
    self.n_hidden_dense_lower = n_hidden_dense_lower
    self.n_hidden_dense_lower_output = n_hidden_dense_lower_output
    self.n_hidden_conv = n_hidden_conv

    ## the lower layers
    self.mlp_conv = MultiLayerConvolution(n_layers_conv, n_hidden_conv,
                                          spatial_width, n_colors, n_scales)
    self.children = [self.mlp_conv]
    if n_hidden_dense_lower > 0 and n_layers_dense_lower > 0:
        n_input = n_colors*spatial_width**2
        n_output = n_hidden_dense_lower_output*spatial_width**2
        self.mlp_dense_lower = MLP(
            [dense_nonlinearity] * n_layers_conv,
            [n_input] + [n_hidden_dense_lower] * (n_layers_conv-1) + [n_output],
            name='MLP dense lower', weights_init=Orthogonal(),
            biases_init=Constant(0))
        self.children.append(self.mlp_dense_lower)
    else:
        n_hidden_dense_lower_output = 0

    ## the upper layers (applied to each pixel independently)
    n_output = n_colors*n_temporal_basis*2  # "*2" for both mu and sigma
    self.mlp_dense_upper = MLP(
        [dense_nonlinearity] * (n_layers_dense_upper-1) + [Identity()],
        [n_hidden_conv+n_hidden_dense_lower_output] +
        [n_hidden_dense_upper] * (n_layers_dense_upper-1) + [n_output],
        name='MLP dense upper', weights_init=Orthogonal(),
        biases_init=Constant(0))
    self.children.append(self.mlp_dense_upper)
Example #2
Source File: test_utils.py From attention-lvcsr with MIT License
def setUp(self):
    self.mlp = MLP([Sequence([Identity(name='id1').apply,
                              Tanh(name='tanh1').apply],
                             name='sequence1'),
                    Sequence([Logistic(name='logistic1').apply,
                              Identity(name='id2').apply,
                              Tanh(name='tanh2').apply],
                             name='sequence2'),
                    Logistic(name='logistic2'),
                    Sequence([Sequence([Logistic(name='logistic3').apply],
                                       name='sequence4').apply],
                             name='sequence3')],
                   [10, 5, 9, 5, 9])
Example #3
Source File: test_utils.py From attention-lvcsr with MIT License
def test_find_second_and_third_level(self):
    found = set(find_bricks([self.mlp], lambda x: isinstance(x, Identity)))
    assert len(found) == 2
    assert self.mlp.activations[0].children[0] in found
    assert self.mlp.activations[1].children[1] in found
Example #4
Source File: test_graph.py From attention-lvcsr with MIT License
def test_snapshot():
    x = tensor.matrix('x')
    linear = MLP([Identity(), Identity()], [10, 10, 10],
                 weights_init=Constant(1), biases_init=Constant(2))
    linear.initialize()
    y = linear.apply(x)
    cg = ComputationGraph(y)
    snapshot = cg.get_snapshot(
        dict(x=numpy.zeros((1, 10), dtype=theano.config.floatX)))
    assert len(snapshot) == 14
Example #5
Source File: __init__.py From blocks-examples with MIT License
def main(save_to, num_batches):
    mlp = MLP([Tanh(), Identity()], [1, 10, 1],
              weights_init=IsotropicGaussian(0.01),
              biases_init=Constant(0), seed=1)
    mlp.initialize()
    x = tensor.vector('numbers')
    y = tensor.vector('roots')
    cost = SquaredError().apply(y[:, None], mlp.apply(x[:, None]))
    cost.name = "cost"
    main_loop = MainLoop(
        GradientDescent(
            cost=cost, parameters=ComputationGraph(cost).parameters,
            step_rule=Scale(learning_rate=0.001)),
        get_data_stream(range(100)),
        model=Model(cost),
        extensions=[
            Timing(),
            FinishAfter(after_n_batches=num_batches),
            DataStreamMonitoring(
                [cost], get_data_stream(range(100, 200)),
                prefix="test"),
            TrainingDataMonitoring([cost], after_epoch=True),
            Checkpoint(save_to),
            Printing()])
    main_loop.run()
    return main_loop