Python keras.applications.mobilenet.relu6() Examples
The following are 8 code examples of keras.applications.mobilenet.relu6(). The original project and source file are noted above each example. You may also want to check out all available functions/classes of the module keras.applications.mobilenet.
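Most of the examples below exist because relu6 moved between Keras releases: in Keras 2.1.x it can be imported from keras.applications.mobilenet, while later releases express the same clipped activation through keras.layers.ReLU(6.) or the backend's max_value argument. As a rough orientation before the examples, here is a minimal compatibility sketch (not taken from any of the projects below, and only one possible way to bridge the versions):

    from keras import backend as K

    try:
        # Keras <= 2.1.x ships relu6 as a standalone function.
        from keras.applications.mobilenet import relu6
    except ImportError:
        # Newer Keras versions removed it; an equivalent clipped ReLU
        # can be defined via the backend's max_value argument.
        def relu6(x):
            return K.relu(x, max_value=6)

With relu6 resolved this way, saved MobileNet models can still be deserialized by passing it in custom_objects, which is essentially what Examples #3 and #6 do.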
Example #1
Source File: MobileNetV2.py From MobileNet_V2_Keras with MIT License | 6 votes |
def conv_block(inputs, filters, weight_decay, name, kernel=(3, 3), strides=(1, 1)):
    '''
    Normal convolution block, performs conv+bn+relu6 operations.
    :param inputs: Input Keras tensor in (B, H, W, C_in)
    :param filters: number of filters in the convolution layer
    :param weight_decay: l2 weight decay applied to the convolution kernel
    :param name: name for the convolutional layer
    :param kernel: kernel size
    :param strides: strides for convolution
    :return: Output tensor in (B, H_new, W_new, filters)
    '''
    channel_axis = 1 if K.image_data_format() == 'channels_first' else -1
    x = Conv2D(filters, kernel, padding='same', use_bias=False,
               kernel_regularizer=l2(weight_decay), strides=strides, name=name)(inputs)
    x = BatchNormalization(axis=channel_axis, epsilon=1e-5, momentum=0.9, name=name + '_bn')(x)
    return Relu6(x, name=name + '_relu')
Example #2
Source File: mobilenet_v2.py From FaceRecognition with MIT License | 6 votes |
def _conv_block(inputs, filters, kernel, strides):
    """Convolution Block
    This function defines a 2D convolution operation with BN and relu6.

    # Arguments
        inputs: Tensor, input tensor of conv layer.
        filters: Integer, the dimensionality of the output space.
        kernel: An integer or tuple/list of 2 integers, specifying the
            width and height of the 2D convolution window.
        strides: An integer or tuple/list of 2 integers, specifying the
            strides of the convolution along the width and height. Can be a
            single integer to specify the same value for all spatial dimensions.

    # Returns
        Output tensor.
    """
    channel_axis = 1 if K.image_data_format() == 'channels_first' else -1

    x = Conv2D(filters, kernel, padding='same', strides=strides)(inputs)
    x = BatchNormalization(axis=channel_axis)(x)
    return Activation(relu6)(x)
Example #3
Source File: keras2_parser.py From MMdnn with MIT License | 5 votes |
def __init__(self, model):
    super(Keras2Parser, self).__init__()

    # load model files into Keras graph
    if isinstance(model, _string_types):
        try:
            # Keras 2.1.6
            from keras.applications.mobilenet import relu6
            from keras.applications.mobilenet import DepthwiseConv2D
            model = _keras.models.load_model(
                model,
                custom_objects={
                    'relu6': _keras.applications.mobilenet.relu6,
                    'DepthwiseConv2D': _keras.applications.mobilenet.DepthwiseConv2D
                }
            )
        except:
            # Keras 2.2.2
            import keras.layers as layers
            model = _keras.models.load_model(
                model,
                custom_objects={
                    'relu6': layers.ReLU(6, name='relu6'),
                    'DepthwiseConv2D': layers.DepthwiseConv2D
                }
            )
        self.weight_loaded = True

    elif isinstance(model, tuple):
        model = self._load_model(model[0], model[1])

    else:
        assert False

    # _keras.utils.plot_model(model, "model.png", show_shapes = True)

    # Build network graph
    self.data_format = _keras.backend.image_data_format()
    self.keras_graph = Keras2Graph(model)
    self.keras_graph.build()
    self.lambda_layer_count = 0
Example #4
Source File: MobileNetV2.py From MobileNet_V2_Keras with MIT License | 5 votes |
def Relu6(x, **kwargs):
    return Activation(relu6, **kwargs)(x)
Example #5
Source File: mobilenet_v2.py From FaceRecognition with MIT License | 5 votes |
def _bottleneck(inputs, filters, kernel, t, s, r=False):
    """Bottleneck
    This function defines a basic bottleneck structure.

    # Arguments
        inputs: Tensor, input tensor of conv layer.
        filters: Integer, the dimensionality of the output space.
        kernel: An integer or tuple/list of 2 integers, specifying the
            width and height of the 2D convolution window.
        t: Integer, expansion factor. t is always applied to the input size.
        s: An integer or tuple/list of 2 integers, specifying the strides
            of the convolution along the width and height. Can be a single
            integer to specify the same value for all spatial dimensions.
        r: Boolean, whether to use the residuals.

    # Returns
        Output tensor.
    """
    channel_axis = 1 if K.image_data_format() == 'channels_first' else -1
    tchannel = K.int_shape(inputs)[channel_axis] * t

    x = _conv_block(inputs, tchannel, (1, 1), (1, 1))

    x = DepthwiseConv2D(kernel, strides=(s, s), depth_multiplier=1, padding='same')(x)
    x = BatchNormalization(axis=channel_axis)(x)
    x = Activation(relu6)(x)

    x = Conv2D(filters, (1, 1), strides=(1, 1), padding='same')(x)
    x = BatchNormalization(axis=channel_axis)(x)

    if r:
        x = add([x, inputs])
    return x
Example #6
Source File: keras2_parser.py From MMdnn with MIT License | 4 votes |
def _load_model(self, model_network_path, model_weight_path):
    """Load a keras model from disk

    Parameters
    ----------
    model_network_path: str
        Path where the model network path is (json file)

    model_weight_path: str
        Path where the model network weights are (hd5 file)

    Returns
    -------
    model: A keras model
    """
    from keras.models import model_from_json

    # Load the model network
    json_file = open(model_network_path, 'r')
    loaded_model_json = json_file.read()
    json_file.close()

    # Load the model weights
    try:
        from keras.applications.mobilenet import relu6
        from keras.applications.mobilenet import DepthwiseConv2D
        loaded_model = model_from_json(loaded_model_json, custom_objects={
            'relu6': _keras.applications.mobilenet.relu6,
            'DepthwiseConv2D': _keras.applications.mobilenet.DepthwiseConv2D})
    except:
        import keras.layers as layers
        loaded_model = model_from_json(loaded_model_json, custom_objects={
            'relu6': layers.ReLU(6, name='relu6'),
            'DepthwiseConv2D': layers.DepthwiseConv2D})

    if model_weight_path:
        if os.path.isfile(model_weight_path):
            loaded_model.load_weights(model_weight_path)
            self.weight_loaded = True
            print("Network file [{}] and [{}] is loaded successfully.".format(
                model_network_path, model_weight_path))
        else:
            print("Warning: Weights File [%s] is not found." % (model_weight_path))

    return loaded_model
Example #7
Source File: _layers2.py From coremltools with BSD 3-Clause "New" or "Revised" License | 4 votes |
def _get_activation_name_from_keras_layer(keras_layer):
    if isinstance(keras_layer, _keras.layers.advanced_activations.LeakyReLU):
        non_linearity = "LEAKYRELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.PReLU):
        non_linearity = "PRELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.ELU):
        non_linearity = "ELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.ThresholdedReLU):
        non_linearity = "THRESHOLDEDRELU"
    elif isinstance(keras_layer, _keras.layers.advanced_activations.Softmax):
        non_linearity = "SOFTMAX"
    else:
        import six

        if six.PY2:
            act_name = keras_layer.activation.func_name
        else:
            act_name = keras_layer.activation.__name__

        if act_name == "softmax":
            non_linearity = "SOFTMAX"
        elif act_name == "sigmoid":
            non_linearity = "SIGMOID"
        elif act_name == "tanh":
            non_linearity = "TANH"
        elif act_name == "relu":
            non_linearity = "RELU"
        elif act_name == "relu6":
            non_linearity = "RELU6"
        elif act_name == "softplus":
            non_linearity = "SOFTPLUS"
        elif act_name == "softsign":
            non_linearity = "SOFTSIGN"
        elif act_name == "hard_sigmoid":
            non_linearity = "SIGMOID_HARD"
        elif act_name == "elu":
            non_linearity = "UNIT_ELU"
        elif act_name == "linear":
            non_linearity = "LINEAR"
        elif act_name == "selu":
            non_linearity = "SELU"
        else:
            non_linearity = "CUSTOM"

    return non_linearity
Example #8
Source File: test_keras2_numeric.py From coremltools with BSD 3-Clause "New" or "Revised" License | 4 votes |
def test_tiny_mobilenet_arch(self, model_precision=_MLMODEL_FULL_PRECISION):
    def ReLU6(x, name):
        if keras.__version__ >= _StrictVersion("2.2.1"):
            return ReLU(6.0, name=name)(x)
        else:
            return Activation(relu6, name=name)(x)

    img_input = Input(shape=(32, 32, 3))
    x = Conv2D(
        4, (3, 3), padding="same", use_bias=False, strides=(2, 2), name="conv1"
    )(img_input)
    x = BatchNormalization(axis=-1, name="conv1_bn")(x)
    x = ReLU6(x, name="conv1_relu")

    x = DepthwiseConv2D(
        (3, 3),
        padding="same",
        depth_multiplier=1,
        strides=(1, 1),
        use_bias=False,
        name="conv_dw_1",
    )(x)
    x = BatchNormalization(axis=-1, name="conv_dw_1_bn")(x)
    x = ReLU6(x, name="conv_dw_1_relu")

    x = Conv2D(
        8, (1, 1), padding="same", use_bias=False, strides=(1, 1), name="conv_pw_1"
    )(x)
    x = BatchNormalization(axis=-1, name="conv_pw_1_bn")(x)
    x = ReLU6(x, name="conv_pw_1_relu")

    x = DepthwiseConv2D(
        (3, 3),
        padding="same",
        depth_multiplier=1,
        strides=(2, 2),
        use_bias=False,
        name="conv_dw_2",
    )(x)
    x = BatchNormalization(axis=-1, name="conv_dw_2_bn")(x)
    x = ReLU6(x, name="conv_dw_2_relu")

    x = Conv2D(
        8, (1, 1), padding="same", use_bias=False, strides=(2, 2), name="conv_pw_2"
    )(x)
    x = BatchNormalization(axis=-1, name="conv_pw_2_bn")(x)
    x = ReLU6(x, name="conv_pw_2_relu")

    model = Model(inputs=[img_input], outputs=[x])

    self._test_model(model, delta=1e-2, model_precision=model_precision)