Python keras.layers.core.Layer() Examples
The following are 18 code examples of keras.layers.core.Layer(), drawn from open-source projects. The source file and project for each example are noted above it. You may also want to check out the other functions and classes available in the keras.layers.core module.
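Most of the custom-layer examples below (the MMdnn emitters in particular) follow the same Keras 2 pattern: subclass keras.layers.Layer, compute the result in call(), and report the resulting shape from compute_output_shape(). As a minimal sketch of that skeleton (the class name and the scaling operation are illustrative, not taken from any example below):

import keras

class MyScale(keras.layers.Layer):
    # Toy custom layer: multiplies its input elementwise by a fixed factor.
    def __init__(self, factor=2.0, **kwargs):
        super(MyScale, self).__init__(**kwargs)
        self.factor = factor

    def call(self, inputs):
        return inputs * self.factor

    def compute_output_shape(self, input_shape):
        # Elementwise operation, so the shape is unchanged.
        return input_shape

The older CAPTCHA-breaking tests, by contrast, instantiate core.Layer() directly; in that Keras generation a bare Layer acts as a passthrough whose output equals its input, which is exactly what those tests assert.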
Example #1
Source File: test_core.py From CAPTCHA-breaking with MIT License | 6 votes |
def test_connections(self):
    nb_samples = 10
    input_dim = 5
    layer1 = core.Layer()
    layer2 = core.Layer()

    input = np.ones((nb_samples, input_dim))
    layer1.input = theano.shared(value=input)

    # As long as there is no previous layer, an error should be raised.
    for train in [True, False]:
        self.assertRaises(AttributeError, layer2.get_input, train)

    # After connecting, input of layer1 should be passed through
    layer2.set_previous(layer1)
    for train in [True, False]:
        assert_allclose(layer2.get_input(train).eval(), input)
        assert_allclose(layer2.get_output(train).eval(), input)
Example #2
Source File: keras2_emitter.py From MMdnn with MIT License | 6 votes |
def _layer_Shape(self):
    self.add_body(0, '''
def __shape(input):
    return Lambda(lambda x: tf.shape(x))(input)
''')

# def _layer_Constant(self):
#     self.add_body(0, '''
# class my_constant(keras.layers.Layer):
#     def __init__(self, value, **kwargs):
#         super(my_constant, self).__init__(**kwargs)
#         self._value = value
#
#     # the input is dummy, just for creating keras graph.
#     def call(self, dummy):
#         res = K.constant(self._value)
#         self.output_shapes = K.int_shape(res)
#         return res
#
#     def compute_output_shape(self, input_shape):
#         return self.output_shapes
#     ''')
Example #3
Source File: keras2_emitter.py From MMdnn with MIT License | 6 votes |
def _layer_Affine(self):
    self.add_body(0, '''
from keras.engine import Layer, InputSpec
from keras import initializers
from keras import backend as K


class Affine(Layer):
    def __init__(self, scale, bias=None, **kwargs):
        super(Affine, self).__init__(**kwargs)
        self.gamma = scale
        self.beta = bias

    def call(self, inputs, training=None):
        input_shape = K.int_shape(inputs)
        # Prepare broadcasting shape.
        return self.gamma * inputs + self.beta

    def compute_output_shape(self, input_shape):
        return input_shape
''')
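For reference, a hedged usage sketch of the Affine class emitted above; the input size and the scale/bias values are made up for illustration, and the snippet assumes the emitted code (with its Layer and backend imports) has already been executed:

from keras.layers import Input
from keras.models import Model

inp = Input(shape=(16,))
out = Affine(scale=0.5, bias=1.0)(inp)   # computes 0.5 * x + 1.0 elementwise
model = Model(inputs=inp, outputs=out)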
Example #4
Source File: keras2_emitter.py From MMdnn with MIT License | 6 votes |
def _emit_h_zero(self, IR_node):
    if not self.layers_codes.get(IR_node.pattern, None):
        class_code = '''
class my_h_zero(keras.layers.Layer):
    def __init__(self, **kwargs):
        super(my_h_zero, self).__init__(**kwargs)

    def call(self, dummy):
        {:<15} = K.constant(np.full((1, {}), {}))

        return {}
'''.format(IR_node.variable_name,
           IR_node.get_attr('fill_size'),
           IR_node.get_attr('fill_value'),
           IR_node.variable_name)
        self.layers_codes[IR_node.pattern] = class_code

    code = "{:<15} = my_h_zero()({})".format(IR_node.variable_name, self.parent_variable_name(IR_node))

    return code
Example #5
Source File: keras2_emitter.py From MMdnn with MIT License | 6 votes |
def emit_Slice(self, IR_node, in_scope=False):
    # It arouses some problems:
    # it can be implemented by Lambda Layer
    # https://github.com/keras-team/keras/issues/890
    self.used_layers.add(IR_node.type)
    extra_str = ""
    if IR_node.get_attr('strides'):
        extra_str += "strides={}".format(IR_node.get_attr('strides'))
    if IR_node.get_attr('begin_mask'):
        extra_str += ", begin_mask={}".format(IR_node.get_attr('begin_mask'))
    if IR_node.get_attr('end_mask'):
        extra_str += ", end_mask={}".format(IR_node.get_attr('end_mask'))
    if IR_node.get_attr('shrink_axis_mask'):
        extra_str += ", shrink_axis_mask={}".format(IR_node.get_attr('shrink_axis_mask'))
    code = "{:<15} = __slice({}, {}, {}, {})".format(
        IR_node.variable_name,
        self.parent_variable_name(IR_node),
        IR_node.get_attr('starts'),
        IR_node.get_attr('ends'),
        extra_str)
    return code
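The __slice helper referenced in the generated line is defined by a separate helper-emitting method not shown on this page. As the comment above notes, slicing can be implemented with a Lambda layer; a rough sketch of that approach, with an illustrative signature rather than MMdnn's actual one:

import tensorflow as tf
from keras.layers import Lambda

def __slice(x, starts, ends, strides=None):
    # Wrap tf.strided_slice in a Lambda layer so the slice becomes part of the Keras graph.
    return Lambda(lambda t: tf.strided_slice(t, starts, ends, strides))(x)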
Example #6
Source File: test_core.py From CAPTCHA-breaking with MIT License | 6 votes |
def test_input_output(self):
    nb_samples = 10
    input_dim = 5
    layer = core.Layer()

    # As long as there is no input, an error should be raised.
    for train in [True, False]:
        self.assertRaises(AttributeError, layer.get_input, train)
        self.assertRaises(AttributeError, layer.get_output, train)

    # Once an input is provided, it should be reachable through the
    # appropriate getters
    input = np.ones((nb_samples, input_dim))
    layer.input = theano.shared(value=input)
    for train in [True, False]:
        assert_allclose(layer.get_input(train).eval(), input)
        assert_allclose(layer.get_output(train).eval(), input)
Example #7
Source File: test_core.py From CAPTCHA-breaking with MIT License | 5 votes |
def test_autoencoder(self):
    layer_1 = core.Layer()
    layer_2 = core.Layer()
    layer = core.AutoEncoder(layer_1, layer_2)
    self._runner(layer)
Example #8
Source File: test_core.py From CAPTCHA-breaking with MIT License | 5 votes |
def test_merge(self):
    layer_1 = core.Layer()
    layer_2 = core.Layer()
    layer = core.Merge([layer_1, layer_2])
    self._runner(layer)
Example #9
Source File: keras2_emitter.py From MMdnn with MIT License | 5 votes |
def _layer_LRN(self):
    self.add_body(0, '''
from keras.layers.core import Layer

class LRN(Layer):

    def __init__(self, size=5, alpha=0.0005, beta=0.75, k=2, **kwargs):
        self.n = size
        self.alpha = alpha
        self.beta = beta
        self.k = k
        super(LRN, self).__init__(**kwargs)

    def build(self, input_shape):
        self.shape = input_shape
        super(LRN, self).build(input_shape)

    def call(self, x, mask=None):
        half_n = int(self.n / 2)
        squared = K.square(x)
        scale = self.k
        norm_alpha = self.alpha / self.n
        if (K.image_data_format() == 'channels_first'):
            b, f, r, c = self.shape
            squared = K.expand_dims(squared, 0)
            squared = K.spatial_3d_padding(squared, padding=((half_n, half_n), (0, 0), (0, 0)))
            squared = K.squeeze(squared, 0)
            for i in range(self.n):
                scale += norm_alpha * squared[:, i:i+f, :, :]
        else:
            b, r, c, f = self.shape
            squared = K.expand_dims(squared, -1)
            squared = K.spatial_3d_padding(squared, padding=((0, 0), (0, 0), (half_n, half_n)))
            squared = K.squeeze(squared, -1)
            for i in range(self.n):
                scale += norm_alpha * squared[:, :, :, i:i+f]
        scale = K.pow(scale, self.beta)
        return x / scale

    def compute_output_shape(self, input_shape):
        return input_shape''')
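A hedged sketch of how the emitted LRN layer might be dropped into a model after a convolution; the convolution parameters and input shape are made up for illustration, and the snippet assumes the emitted code (and its K backend import) has already been executed:

from keras.models import Sequential
from keras.layers import Conv2D

model = Sequential()
model.add(Conv2D(64, (3, 3), activation='relu', input_shape=(224, 224, 3)))
model.add(LRN(size=5, alpha=0.0001, beta=0.75, k=2))   # normalize across neighbouring channels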
Example #10
Source File: test_core.py From CAPTCHA-breaking with MIT License | 5 votes |
def test_base(self):
    layer = core.Layer()
    self._runner(layer)
Example #11
Source File: keras2_emitter.py From MMdnn with MIT License | 5 votes |
def _layer_Mul(self):
    self.add_body(0, '''
class my_mul(keras.layers.Layer):
    def __init__(self, **kwargs):
        super(my_mul, self).__init__(**kwargs)

    def call(self, inputs):
        res = inputs[0] * inputs[1]
        self.output_shapes = K.int_shape(res)
        return res

    def compute_output_shape(self, input_shape):
        return self.output_shapes
''')
Example #12
Source File: keras2_emitter.py From MMdnn with MIT License | 5 votes |
def _layer_Add(self):
    self.add_body(0, '''
class my_add(keras.layers.Layer):
    def __init__(self, **kwargs):
        super(my_add, self).__init__(**kwargs)

    def call(self, inputs):
        res = inputs[0] + inputs[1]
        self.output_shapes = K.int_shape(res)
        return res

    def compute_output_shape(self, input_shape):
        return self.output_shapes
''')
Example #13
Source File: keras2_emitter.py From MMdnn with MIT License | 5 votes |
def _layer_Sub(self):
    self.add_body(0, '''
class my_sub(keras.layers.Layer):
    def __init__(self, **kwargs):
        super(my_sub, self).__init__(**kwargs)

    def call(self, inputs):
        res = inputs[0] - inputs[1]
        self.output_shapes = K.int_shape(res)
        return res

    def compute_output_shape(self, input_shape):
        return self.output_shapes
''')
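The three emitted helpers in Examples #11 to #13 (my_mul, my_add, my_sub) are all invoked the same way, on a two-element list of tensors. A hedged sketch of such a call site; MMdnn generates similar lines automatically, the variable names here are illustrative, and the emitted module's keras and K imports are assumed to be in scope:

from keras.layers import Input

a = Input(shape=(8,))
b = Input(shape=(8,))
diff = my_sub()([a, b])    # elementwise a - b
total = my_add()([a, b])   # elementwise a + b
prod = my_mul()([a, b])    # elementwise a * b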
Example #14
Source File: girthy.py From keras-rtst with MIT License | 5 votes |
def create_res_texture_net(input_rows, input_cols, num_res_filters=128,
        res_out_activation='linear', activation='relu', num_res_blocks=5, depth=3):
    '''Adds a series of residual blocks at each resolution scale, rather than just
    the minimum one.
    '''
    net = Graph()
    net.add_input('x', input_shape=(3, input_rows, input_cols))
    add_conv_block(net, 'in0', 'x', num_res_filters // 4, 9, activation=activation)
    last_name = 'in0'
    # scale down input to max depth with a series of strided convolutions
    for scale_i in range(depth):
        num_scale_filters = num_res_filters - scale_i * 8  # // (2 ** scale_i) # (depth - scale_i - 1))
        scale_name = 'down_{}'.format(scale_i)
        add_conv_block(net, scale_name, last_name, num_scale_filters, 3, subsample=(2, 2), activation=activation)
        last_name = scale_name
    # add a series of residual blocks at each scale, from smallest to largest
    for scale_i in reversed(range(depth)):
        num_scale_filters = num_res_filters - scale_i * 8  # // (2 ** scale_i) # (depth - scale_i - 1))
        last_scale_name = last_name
        for res_i in range(num_res_blocks):
            block_name = 'res_{}_{}'.format(scale_i, res_i)
            add_conv_block(net, block_name + '_b0', last_name, num_res_filters, 3, activation=activation)
            add_conv_block(net, block_name + '_b1', block_name + '_b0', num_res_filters, 1, activation='linear')
            if last_name == last_scale_name:
                # transform residual connection to same number of filters
                add_conv_block(net, block_name + '_res', last_name, num_res_filters, 1, activation='linear')
            else:
                # no transform needed when the last node was part of the current residual block
                net.add_node(Layer(), block_name + '_res', last_name)
            net.add_node(Activation(res_out_activation), block_name, merge_mode='sum', inputs=[block_name + '_b1', block_name + '_res'])
            last_name = block_name
        # theano doesn't seem to support fractionally-strided convolutions at the moment
        up_name = 'up_{}'.format(scale_i)
        net.add_node(UpSampling2D(), up_name, last_name)
        last_name = up_name
        last_scale_name = up_name
    # final output
    add_conv_block(net, 'out', last_name, 3, 9, activation='linear')
    net.add_node(Activation('linear'), 'texture_rgb', 'out', create_output=True)
    return net
Example #15
Source File: rbm.py From keras_extensions with MIT License | 5 votes |
def get_h_given_x_layer(self, as_initial_layer=False):
    """
    Generates a new Dense Layer that computes mean of Bernoulli distribution p(h|x), ie. p(h=1|x).
    """
    if as_initial_layer:
        layer = Dense(input_dim=self.input_dim, output_dim=self.hidden_dim, activation='sigmoid',
                      weights=[self.W.get_value(), self.bh.get_value()])
    else:
        layer = Dense(output_dim=self.hidden_dim, activation='sigmoid',
                      weights=[self.W.get_value(), self.bh.get_value()])
    return layer
Example #16
Source File: rbm.py From keras_extensions with MIT License | 5 votes |
def get_x_given_h_layer(self, as_initial_layer=False):
    """
    Generates a new Dense Layer that computes mean of Bernoulli distribution p(x|h), ie. p(x=1|h).
    """
    if as_initial_layer:
        layer = Dense(input_dim=self.hidden_dim, output_dim=self.input_dim, activation='sigmoid',
                      weights=[self.W.get_value().T, self.bx.get_value()])
    else:
        layer = Dense(output_dim=self.input_dim, activation='sigmoid',
                      weights=[self.W.get_value().T, self.bx.get_value()])
    return layer
Example #17
Source File: rbm.py From keras_extensions with MIT License | 5 votes |
def get_x_given_h_layer(self, as_initial_layer=False):
    """
    Generates a new Dense Layer that computes mean of Gaussian distribution p(x|h).
    """
    if not as_initial_layer:
        layer = Dense(output_dim=self.input_dim, activation='linear',
                      weights=[self.W.get_value().T, self.bx.get_value()])
    else:
        layer = Dense(input_dim=self.hidden_dim, output_dim=self.input_dim, activation='linear',
                      weights=[self.W.get_value().T, self.bx.get_value()])
    return layer
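A hedged sketch of how these generated Dense layers might be used to map hidden samples back to visible means. It assumes `rbm` is a trained instance of the RBM class these methods belong to, with an `hidden_dim` attribute; note that keras_extensions targets the old Keras API, hence Dense(output_dim=...):

import numpy as np
from keras.models import Sequential

decoder = Sequential()
decoder.add(rbm.get_x_given_h_layer(as_initial_layer=True))
decoder.compile(optimizer='sgd', loss='mean_squared_error')   # compile so the prediction function is built

h_samples = np.random.binomial(1, 0.5, size=(10, rbm.hidden_dim)).astype('float32')
x_mean = decoder.predict(h_samples)   # mean of p(x|h) for each hidden sample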
Example #18
Source File: keras2_emitter.py From MMdnn with MIT License | 4 votes |
def _gen_scope_code(self, scope_node):

    def _scope_func(scope_name, params, code, return_var):
        if len(return_var) > 1:
            return_var_code = '[{}]'.format(', '.join(return_var))
            output_shape_code = '        self.output_shapes = [{}]\n'.format(', '.join(['K.int_shape(%s)' % s for s in return_var]))
        else:
            return_var_code = ', '.join(return_var)
            output_shape_code = '        self.output_shapes = K.int_shape({})\n'.format(return_var[0])

        code = """
class my_{}(keras.layers.Layer):
    def __init__(self, **kwargs):
        super(my_{}, self).__init__(**kwargs)

    def call(self, inputs):
{}
{}
{}
        return {}

    def compute_output_shape(self, input_shape):
        return self.output_shapes
""".format(scope_name, scope_name, params, code, output_shape_code, return_var_code)

        return code

    if not self.layers_codes.get(scope_node.pattern, None):
        body_code = str()
        for node_name in scope_node.topology_list:
            node = self.IR_graph.get_node(node_name)
            node_type = node.type

            if hasattr(self, "emit_" + node_type):
                func = getattr(self, "emit_" + node_type)
                line = func(node, True)
                if line != None:
                    body_code += "        " + line + '\n'
            else:
                print("KerasEmitter has not supported operator [%s]." % (node_type))
                self.emit_UNKNOWN(node)

        # param_code does not need parameter slice.
        input_params = scope_node.input_params
        param_code = str()

        import re
        for i, p in enumerate(scope_node.in_edges):
            p_node = self.IR_graph.get_node(p)
            if p_node.type == 'Scope' and len(p_node.return_variables) > 1 and ':' not in p:
                # input is a list.
                param_code += "        {} = [{}]\n".format(p_node.variable_name, ', '.join('inputs[%s]' % s for s in range(i, i + len(p_node.return_variables))))
            else:
                param_code += "        {} = inputs[{}]\n".format(p_node.variable_name, i)

        function_code = _scope_func(scope_node.pattern, param_code, body_code, scope_node.return_variables)

        self.layers_codes[scope_node.pattern] = function_code

    return body_code