Python nets.mobilenet.mobilenet.mobilenet() Examples

The following are 30 code examples of nets.mobilenet.mobilenet.mobilenet(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module nets.mobilenet.mobilenet, or try the search function.
Example #1
Source File: mobilenet_v2.py    From CVTron with Apache License 2.0 6 votes vote down vote up
def training_scope(**kwargs):
  """Build the `arg_scope` used when training MobilenetV2.

  Typical usage:
     with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
       logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

  Args:
    **kwargs: Forwarded to mobilenet.training_scope. Supported keys:
      weight_decay: weight decay used to regularize the model.
      stddev: standard deviation for initialization; a negative value
        selects xavier initialization.
      dropout_keep_prob: dropout keep probability.
      bn_decay: decay for the batch-norm moving averages.

  Returns:
    An `arg_scope` for building the mobilenet v2 model.
  """
  return lib.training_scope(**kwargs)
Example #2
Source File: mobilenet_v2.py    From tf_ctpn with MIT License 6 votes vote down vote up
def training_scope(**kwargs):
    """Build the `arg_scope` used when training MobilenetV2.

    Typical usage:
       with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
         logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

    Args:
      **kwargs: Forwarded to mobilenet.training_scope. Supported keys:
        weight_decay: weight decay used to regularize the model.
        stddev: standard deviation for initialization; a negative value
          selects xavier initialization.
        dropout_keep_prob: dropout keep probability.
        bn_decay: decay for the batch-norm moving averages.

    Returns:
      An `arg_scope` for building the mobilenet v2 model.
    """
    return lib.training_scope(**kwargs)
Example #3
Source File: mobilenet_v2.py    From edafa with MIT License 6 votes vote down vote up
def training_scope(**kwargs):
  """Build the `arg_scope` used when training MobilenetV2.

  Typical usage:
     with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
       logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

  Args:
    **kwargs: Forwarded to mobilenet.training_scope. Supported keys:
      weight_decay: weight decay used to regularize the model.
      stddev: standard deviation for initialization; a negative value
        selects xavier initialization.
      dropout_keep_prob: dropout keep probability.
      bn_decay: decay for the batch-norm moving averages.

  Returns:
    An `arg_scope` for building the mobilenet v2 model.
  """
  return lib.training_scope(**kwargs)
Example #4
Source File: mobilenet_v2.py    From DeepLab_v3 with MIT License 6 votes vote down vote up
def training_scope(**kwargs):
  """Build the `arg_scope` used when training MobilenetV2.

  Typical usage:
     with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
       logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

  Args:
    **kwargs: Forwarded to mobilenet.training_scope. Supported keys:
      weight_decay: weight decay used to regularize the model.
      stddev: standard deviation for initialization; a negative value
        selects xavier initialization.
      dropout_keep_prob: dropout keep probability.
      bn_decay: decay for the batch-norm moving averages.

  Returns:
    An `arg_scope` for building the mobilenet v2 model.
  """
  return lib.training_scope(**kwargs)
Example #5
Source File: mobilenet_v2_test.py    From RetinaNet_Tensorflow_Rotation with MIT License 5 votes vote down vote up
def testWithSplits(self):
    # Override expanded_conv to split its expansion in two and count the
    # resulting Conv2D ops.
    conv_spec = copy.deepcopy(mobilenet_v2.V2_DEF)
    conv_spec['overrides'] = {(ops.expanded_conv,): dict(split_expansion=2)}
    mobilenet.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=conv_spec)
    num_convs = len(find_ops('Conv2D'))
    # All but three ops carry three conv operators, the remaining three
    # carry one, and there is one unaccounted for.
    self.assertEqual(num_convs, len(conv_spec['spec']) * 3 - 5)
Example #6
Source File: mobilenet_v2_test.py    From RetinaNet_Tensorflow_Rotation with MIT License 5 votes vote down vote up
def testCreationNoClasses(self):
    conv_spec = copy.deepcopy(mobilenet_v2.V2_DEF)
    # With num_classes=None the returned net is the pre-logits pooled tensor.
    net, endpoints = mobilenet.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=conv_spec, num_classes=None)
    self.assertIs(net, endpoints['global_pool'])
Example #7
Source File: mobilenet_v2_test.py    From edafa with MIT License 5 votes vote down vote up
def testBatchNormScopeDoesNotHaveIsTrainingWhenItsSetToNone(self):
    # is_training=None must keep the key out of the batch-norm arg scope.
    arg_sc = mobilenet.training_scope(is_training=None)
    bn_args = arg_sc[slim.arg_scope_func_key(slim.batch_norm)]
    self.assertNotIn('is_training', bn_args)
Example #8
Source File: mobilenet_v2_test.py    From edafa with MIT License 5 votes vote down vote up
def testWithOutputStride16AndExplicitPadding(self):
    tf.reset_default_graph()
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=16,
        use_explicit_padding=True)
    # 224 / 16 == 14 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [14, 14])
Example #9
Source File: mobilenet_v2_test.py    From edafa with MIT License 5 votes vote down vote up
def testWithOutputStride8AndExplicitPadding(self):
    tf.reset_default_graph()
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=8,
        use_explicit_padding=True,
        scope='MobilenetV2')
    # 224 / 8 == 28 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [28, 28])
Example #10
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testFineGrained(self):
    tf.reset_default_graph()
    # With finegrain_classification_mode the embedding layer keeps its
    # full width even at a tiny depth multiplier.
    mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 2)),
        conv_defs=mobilenet_v2.V2_DEF, depth_multiplier=0.01,
        finegrain_classification_mode=True)
    depths = {op.outputs[0].get_shape().as_list()[-1]
              for op in find_ops('Conv2D')}
    # Every convolution is 8->48 except the final ones.
    self.assertSameElements(depths, [8, 48, 1001, 1280])
Example #11
Source File: mobilenet_v2_test.py    From edafa with MIT License 5 votes vote down vote up
def testWithOutputStride16(self):
    tf.reset_default_graph()
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=16)
    # 224 / 16 == 14 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [14, 14])
Example #12
Source File: mobilenet_v2_test.py    From edafa with MIT License 5 votes vote down vote up
def testBatchNormScopeDoesHasIsTrainingWhenItsNotNone(self):
    # Both explicit booleans and the default must record is_training in
    # the batch-norm arg scope.
    for training in (False, True):
      arg_sc = mobilenet.training_scope(is_training=training)
      self.assertIn('is_training',
                    arg_sc[slim.arg_scope_func_key(slim.batch_norm)])
    arg_sc = mobilenet.training_scope()
    self.assertIn('is_training',
                  arg_sc[slim.arg_scope_func_key(slim.batch_norm)])
Example #13
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testBatchNormScopeDoesNotHaveIsTrainingWhenItsSetToNone(self):
    # is_training=None must keep the key out of the batch-norm arg scope.
    arg_sc = mobilenet.training_scope(is_training=None)
    bn_args = arg_sc[slim.arg_scope_func_key(slim.batch_norm)]
    self.assertNotIn('is_training', bn_args)
Example #14
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testWithOutputStride16AndExplicitPadding(self):
    tf.reset_default_graph()
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=16,
        use_explicit_padding=True)
    # 224 / 16 == 14 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [14, 14])
Example #15
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testWithOutputStride8AndExplicitPadding(self):
    tf.reset_default_graph()
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=8,
        use_explicit_padding=True,
        scope='MobilenetV2')
    # 224 / 8 == 28 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [28, 28])
Example #16
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testMobilenetBase(self):
    tf.reset_default_graph()
    # mobilenet_base must return the pre-pooling feature map.
    with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
      features, _ = mobilenet_v2.mobilenet_base(
          tf.placeholder(tf.float32, (10, 224, 224, 16)),
          conv_defs=mobilenet_v2.V2_DEF, depth_multiplier=0.1)
      self.assertEqual(features.get_shape().as_list(), [10, 7, 7, 128])
Example #17
Source File: mobilenet_v2_test.py    From RetinaNet_Tensorflow_Rotation with MIT License 5 votes vote down vote up
def testImageSizes(self):
    # Each input resolution must produce the matching final feature size.
    size_pairs = ((224, 7), (192, 6), (160, 5), (128, 4), (96, 3))
    for input_size, expected in size_pairs:
      tf.reset_default_graph()
      _, endpoints = mobilenet_v2.mobilenet(
          tf.placeholder(tf.float32, (10, input_size, input_size, 3)))
      self.assertEqual(
          endpoints['layer_18/output'].get_shape().as_list()[1:3],
          [expected, expected])
Example #18
Source File: mobilenet_v2.py    From edafa with MIT License 5 votes vote down vote up
def wrapped_partial(func, *args, **kwargs):
  """Return functools.partial(func, ...) carrying func's metadata.

  functools.update_wrapper copies __name__, __doc__, etc. onto the
  partial and returns it, so the result is usable where the original
  function's identity matters (e.g. scope names).
  """
  bound = functools.partial(func, *args, **kwargs)
  return functools.update_wrapper(bound, func)


# Wrappers for mobilenet v2 with depth-multipliers. Note that
# 'finegrain_classification_mode' is set to True, which means the embedding
# layer will not be shrunk when given a depth-multiplier < 1.0.
Example #19
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testDivisibleByWithArgScope(self):
    tf.reset_default_graph()
    # The depth_multiplier arg scope must supply min_depth when no
    # default min_depth is provided.
    with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
      mobilenet_v2.mobilenet(
          tf.placeholder(tf.float32, (10, 224, 224, 2)),
          conv_defs=mobilenet_v2.V2_DEF, depth_multiplier=0.1)
      depths = {op.outputs[0].get_shape().as_list()[-1]
                for op in find_ops('Conv2D')}
      self.assertSameElements(depths, [32, 192, 128, 1001])
Example #20
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testDivisibleBy(self):
    tf.reset_default_graph()
    # divisible_by/min_depth must constrain every conv's output channels.
    mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        divisible_by=16,
        min_depth=32)
    depths = {op.outputs[0].get_shape().as_list()[-1]
              for op in find_ops('Conv2D')}
    self.assertSameElements(
        [32, 64, 96, 160, 192, 320, 384, 576, 960, 1280, 1001], depths)
Example #21
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testWithOutputStride8(self):
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=8,
        scope='MobilenetV2')
    # 224 / 8 == 28 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [28, 28])
Example #22
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testImageSizes(self):
    # Each input resolution must produce the matching final feature size.
    size_pairs = ((224, 7), (192, 6), (160, 5), (128, 4), (96, 3))
    for input_size, expected in size_pairs:
      tf.reset_default_graph()
      _, endpoints = mobilenet_v2.mobilenet(
          tf.placeholder(tf.float32, (10, input_size, input_size, 3)))
      self.assertEqual(
          endpoints['layer_18/output'].get_shape().as_list()[1:3],
          [expected, expected])
Example #23
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testCreationNoClasses(self):
    conv_spec = copy.deepcopy(mobilenet_v2.V2_DEF)
    # With num_classes=None the returned net is the pre-logits pooled tensor.
    net, endpoints = mobilenet.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=conv_spec, num_classes=None)
    self.assertIs(net, endpoints['global_pool'])
Example #24
Source File: mobilenet_v2_test.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def testCreation(self):
    conv_spec = dict(mobilenet_v2.V2_DEF)
    _, endpoints = mobilenet.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=conv_spec)
    num_convs = len(find_ops('Conv2D'))
    # Mostly a sanity test; no deep reason for these particular constants.
    # All but the first two and the last spec entry contribute two
    # convolutions each, plus one extra conv (the logits) not in the spec.
    self.assertEqual(num_convs, len(conv_spec['spec']) * 2 - 2)
    # Every depthwise output should be exposed as an endpoint.
    for layer in range(2, 17):
      self.assertIn('layer_%d/depthwise_output' % layer, endpoints)
Example #25
Source File: mobilenet_v2.py    From CVTron with Apache License 2.0 5 votes vote down vote up
def mobilenet_base(input_tensor, depth_multiplier=1.0, **kwargs):
  """Create the base of the mobilenet (no pooling and no logits)."""
  return mobilenet(
      input_tensor,
      base_only=True,
      depth_multiplier=depth_multiplier,
      **kwargs)
Example #26
Source File: mobilenet_v2_test.py    From R3Det_Tensorflow with MIT License 5 votes vote down vote up
def testWithOutputStride8AndExplicitPadding(self):
    tf.reset_default_graph()
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=8,
        use_explicit_padding=True,
        scope='MobilenetV2')
    # 224 / 8 == 28 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [28, 28])
Example #27
Source File: mobilenet_v2_test.py    From R3Det_Tensorflow with MIT License 5 votes vote down vote up
def testWithOutputStride16(self):
    tf.reset_default_graph()
    features, _ = mobilenet.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        output_stride=16)
    # 224 / 16 == 14 spatially.
    self.assertEqual(features.get_shape().as_list()[1:3], [14, 14])
Example #28
Source File: mobilenet_v2_test.py    From R3Det_Tensorflow with MIT License 5 votes vote down vote up
def testMobilenetBase(self):
    tf.reset_default_graph()
    # mobilenet_base must return the pre-pooling feature map.
    with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
      features, _ = mobilenet_v2.mobilenet_base(
          tf.placeholder(tf.float32, (10, 224, 224, 16)),
          conv_defs=mobilenet_v2.V2_DEF, depth_multiplier=0.1)
      self.assertEqual(features.get_shape().as_list(), [10, 7, 7, 128])
Example #29
Source File: mobilenet_v2_test.py    From R3Det_Tensorflow with MIT License 5 votes vote down vote up
def testFineGrained(self):
    tf.reset_default_graph()
    # With finegrain_classification_mode the embedding layer keeps its
    # full width even at a tiny depth multiplier.
    mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 2)),
        conv_defs=mobilenet_v2.V2_DEF, depth_multiplier=0.01,
        finegrain_classification_mode=True)
    depths = {op.outputs[0].get_shape().as_list()[-1]
              for op in find_ops('Conv2D')}
    # Every convolution is 8->48 except the final ones.
    self.assertSameElements(depths, [8, 48, 1001, 1280])
Example #30
Source File: mobilenet_v2_test.py    From R3Det_Tensorflow with MIT License 5 votes vote down vote up
def testDivisibleByWithArgScope(self):
    tf.reset_default_graph()
    # The depth_multiplier arg scope must supply min_depth when no
    # default min_depth is provided.
    with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
      mobilenet_v2.mobilenet(
          tf.placeholder(tf.float32, (10, 224, 224, 2)),
          conv_defs=mobilenet_v2.V2_DEF, depth_multiplier=0.1)
      depths = {op.outputs[0].get_shape().as_list()[-1]
                for op in find_ops('Conv2D')}
      self.assertSameElements(depths, [32, 192, 128, 1001])