Python nets.mobilenet.mobilenet_v2.mobilenet() Examples
The following are 30 code examples of nets.mobilenet.mobilenet_v2.mobilenet(), collected from open-source projects. Each example lists the source file and project it comes from. You may also want to check out the other available functions and classes of the nets.mobilenet.mobilenet_v2 module.
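Before the examples, a minimal usage sketch of the function itself may help. It assumes the TF1/slim environment the examples below run in; the placeholder shape and num_classes value are illustrative choices, not taken from any particular example.

import tensorflow as tf
from nets.mobilenet import mobilenet_v2

# Illustrative input batch of 224x224 RGB images (shape chosen for this sketch).
images = tf.placeholder(tf.float32, (10, 224, 224, 3))

# Builds the MobileNetV2 classifier; returns the logits tensor and a dict of
# named endpoints (intermediate activations).
logits, endpoints = mobilenet_v2.mobilenet(images, num_classes=1001)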
Example #1
Source File: mobilenet_v2_test.py From edafa with MIT License | 5 votes |
def testCreationNoClasses(self):
  spec = copy.deepcopy(mobilenet_v2.V2_DEF)
  net, ep = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=spec,
      num_classes=None)
  self.assertIs(net, ep['global_pool'])
Example #2
Source File: mobilenet_v2_test.py From CBAM-tensorflow-slim with MIT License | 5 votes |
def testCreation(self):
  spec = dict(mobilenet_v2.V2_DEF)
  _, ep = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=spec)
  num_convs = len(find_ops('Conv2D'))

  # This is mostly a sanity test. No deep reason for these particular
  # constants.
  #
  # All but the first 2 and the last one have two convolutions, and there is
  # one extra conv that is not in the spec (logits).
  self.assertEqual(num_convs, len(spec['spec']) * 2 - 2)

  # Check that depthwise outputs are exposed.
  for i in range(2, 17):
    self.assertIn('layer_%d/depthwise_output' % i, ep)
Example #3
Source File: mobilenet_v2_test.py From RetinaNet_Tensorflow_Rotation with MIT License | 5 votes |
def testWithOutputStride8(self):
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=8,
      scope='MobilenetV2')
  self.assertEqual(out.get_shape().as_list()[1:3], [28, 28])
Example #4
Source File: mobilenet_v2_test.py From CBAM-tensorflow-slim with MIT License | 5 votes |
def testImageSizes(self):
  for input_size, output_size in [(224, 7), (192, 6), (160, 5),
                                  (128, 4), (96, 3)]:
    tf.reset_default_graph()
    _, ep = mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, input_size, input_size, 3)))
    self.assertEqual(ep['layer_18/output'].get_shape().as_list()[1:3],
                     [output_size] * 2)
Example #5
Source File: mobilenet_v2_test.py From CBAM-tensorflow-slim with MIT License | 5 votes |
def testWithSplits(self):
  spec = copy.deepcopy(mobilenet_v2.V2_DEF)
  spec['overrides'] = {
      (ops.expanded_conv,): dict(split_expansion=2),
  }
  _, _ = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=spec)
  num_convs = len(find_ops('Conv2D'))
  # All but 3 ops have 3 conv operators, the remaining 3 have one,
  # and there is one unaccounted for.
  self.assertEqual(num_convs, len(spec['spec']) * 3 - 5)
Example #6
Source File: mobilenet_v2_test.py From CBAM-tensorflow-slim with MIT License | 5 votes |
def testCreationNoClasses(self):
  spec = copy.deepcopy(mobilenet_v2.V2_DEF)
  net, ep = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=spec,
      num_classes=None)
  self.assertIs(net, ep['global_pool'])
Example #7
Source File: mobilenet_v2_test.py From RetinaNet_Tensorflow_Rotation with MIT License | 5 votes |
def testWithOutputStride16(self):
  tf.reset_default_graph()
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=16)
  self.assertEqual(out.get_shape().as_list()[1:3], [14, 14])
Example #8
Source File: mobilenet_v2_test.py From RetinaNet_Tensorflow_Rotation with MIT License | 5 votes |
def testMobilenetBase(self):
  tf.reset_default_graph()
  # Verifies that mobilenet_base returns pre-pooling layer.
  with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
    net, _ = mobilenet_v2.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        depth_multiplier=0.1)
    self.assertEqual(net.get_shape().as_list(), [10, 7, 7, 128])
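Outside of tests, mobilenet_base is typically used to get this pre-pooling feature map as a backbone for downstream tasks. A hedged sketch under the same TF1/slim assumptions (the output_stride value and the use of training_scope here are illustrative, not taken from the test above):

# 'images' is a float32 image batch as in the intro sketch; slim is tf.contrib.slim.
with slim.arg_scope(mobilenet_v2.training_scope(is_training=False)):
  features, endpoints = mobilenet_v2.mobilenet_base(images, output_stride=16)
# 'features' is the last convolutional feature map, before global pooling.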
Example #9
Source File: mobilenet_v2_test.py From RetinaNet_Tensorflow_Rotation with MIT License | 5 votes |
def testFineGrained(self):
  tf.reset_default_graph()
  # Verifies that depth_multiplier arg scope actually works
  # if no default min_depth is provided.
  mobilenet_v2.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 2)),
      conv_defs=mobilenet_v2.V2_DEF,
      depth_multiplier=0.01,
      finegrain_classification_mode=True)
  s = [op.outputs[0].get_shape().as_list()[-1]
       for op in find_ops('Conv2D')]
  s = set(s)
  # All convolutions will be 8->48, except for the last one.
  self.assertSameElements(s, [8, 48, 1001, 1280])
Example #10
Source File: mobilenet_v2_test.py From RetinaNet_Tensorflow_Rotation with MIT License | 5 votes |
def testDivisibleByWithArgScope(self):
  tf.reset_default_graph()
  # Verifies that depth_multiplier arg scope actually works
  # if no default min_depth is provided.
  with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
    mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 2)),
        conv_defs=mobilenet_v2.V2_DEF,
        depth_multiplier=0.1)
    s = [op.outputs[0].get_shape().as_list()[-1]
         for op in find_ops('Conv2D')]
    s = set(s)
    self.assertSameElements(s, [32, 192, 128, 1001])
Example #11
Source File: mobilenet_v2_test.py From RetinaNet_Tensorflow_Rotation with MIT License | 5 votes |
def testWithOutputStride8AndExplicitPadding(self):
  tf.reset_default_graph()
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=8,
      use_explicit_padding=True,
      scope='MobilenetV2')
  self.assertEqual(out.get_shape().as_list()[1:3], [28, 28])
Example #12
Source File: mobilenet_v2_test.py From edafa with MIT License | 5 votes |
def testDivisibleByWithArgScope(self):
  tf.reset_default_graph()
  # Verifies that depth_multiplier arg scope actually works
  # if no default min_depth is provided.
  with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
    mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 2)),
        conv_defs=mobilenet_v2.V2_DEF,
        depth_multiplier=0.1)
    s = [op.outputs[0].get_shape().as_list()[-1]
         for op in find_ops('Conv2D')]
    s = set(s)
    self.assertSameElements(s, [32, 192, 128, 1001])
Example #13
Source File: mobilenet_v2_test.py From edafa with MIT License | 5 votes |
def testWithOutputStride8(self):
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=8,
      scope='MobilenetV2')
  self.assertEqual(out.get_shape().as_list()[1:3], [28, 28])
Example #14
Source File: mobilenet_v2_test.py From edafa with MIT License | 5 votes |
def testWithSplits(self):
  spec = copy.deepcopy(mobilenet_v2.V2_DEF)
  spec['overrides'] = {
      (ops.expanded_conv,): dict(split_expansion=2),
  }
  _, _ = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=spec)
  num_convs = len(find_ops('Conv2D'))
  # All but 3 ops have 3 conv operators, the remaining 3 have one,
  # and there is one unaccounted for.
  self.assertEqual(num_convs, len(spec['spec']) * 3 - 5)
Example #15
Source File: mobilenet_v2_test.py From edafa with MIT License | 5 votes |
def testImageSizes(self):
  for input_size, output_size in [(224, 7), (192, 6), (160, 5),
                                  (128, 4), (96, 3)]:
    tf.reset_default_graph()
    _, ep = mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, input_size, input_size, 3)))
    self.assertEqual(ep['layer_18/output'].get_shape().as_list()[1:3],
                     [output_size] * 2)
Example #16
Source File: mobilenet_v2_test.py From edafa with MIT License | 5 votes |
def testFineGrained(self):
  tf.reset_default_graph()
  # Verifies that depth_multiplier arg scope actually works
  # if no default min_depth is provided.
  mobilenet_v2.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 2)),
      conv_defs=mobilenet_v2.V2_DEF,
      depth_multiplier=0.01,
      finegrain_classification_mode=True)
  s = [op.outputs[0].get_shape().as_list()[-1]
       for op in find_ops('Conv2D')]
  s = set(s)
  # All convolutions will be 8->48, except for the last one.
  self.assertSameElements(s, [8, 48, 1001, 1280])
Example #17
Source File: mobilenet_v2_test.py From edafa with MIT License | 5 votes |
def testCreation(self):
  spec = dict(mobilenet_v2.V2_DEF)
  _, ep = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=spec)
  num_convs = len(find_ops('Conv2D'))

  # This is mostly a sanity test. No deep reason for these particular
  # constants.
  #
  # All but the first 2 and the last one have two convolutions, and there is
  # one extra conv that is not in the spec (logits).
  self.assertEqual(num_convs, len(spec['spec']) * 2 - 2)

  # Check that depthwise outputs are exposed.
  for i in range(2, 17):
    self.assertIn('layer_%d/depthwise_output' % i, ep)
Example #18
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testBatchNormScopeDoesHasIsTrainingWhenItsNotNone(self):
  sc = mobilenet.training_scope(is_training=False)
  self.assertIn('is_training', sc[slim.arg_scope_func_key(slim.batch_norm)])
  sc = mobilenet.training_scope(is_training=True)
  self.assertIn('is_training', sc[slim.arg_scope_func_key(slim.batch_norm)])
  sc = mobilenet.training_scope()
  self.assertIn('is_training', sc[slim.arg_scope_func_key(slim.batch_norm)])
Example #19
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testBatchNormScopeDoesNotHaveIsTrainingWhenItsSetToNone(self):
  sc = mobilenet.training_scope(is_training=None)
  self.assertNotIn('is_training',
                   sc[slim.arg_scope_func_key(slim.batch_norm)])
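The two training_scope tests above only inspect the returned arg_scope dictionary. In practice the scope is meant to wrap model construction, roughly as sketched below; this follows the usage pattern documented for the slim MobileNet code and is not taken from these tests.

# Assumes slim is tf.contrib.slim and 'images' is an input batch as above.
with slim.arg_scope(mobilenet_v2.training_scope(is_training=True)):
  logits, endpoints = mobilenet_v2.mobilenet(images, num_classes=1001)
# With is_training=None, the batch-norm is_training flag is left for the
# caller to set, which is what the second test above checks.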
Example #20
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testWithOutputStride8AndExplicitPadding(self):
  tf.reset_default_graph()
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=8,
      use_explicit_padding=True,
      scope='MobilenetV2')
  self.assertEqual(out.get_shape().as_list()[1:3], [28, 28])
Example #21
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testWithOutputStride16(self):
  tf.reset_default_graph()
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=16)
  self.assertEqual(out.get_shape().as_list()[1:3], [14, 14])
Example #22
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testMobilenetBase(self):
  tf.reset_default_graph()
  # Verifies that mobilenet_base returns pre-pooling layer.
  with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
    net, _ = mobilenet_v2.mobilenet_base(
        tf.placeholder(tf.float32, (10, 224, 224, 16)),
        conv_defs=mobilenet_v2.V2_DEF,
        depth_multiplier=0.1)
    self.assertEqual(net.get_shape().as_list(), [10, 7, 7, 128])
Example #23
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testFineGrained(self):
  tf.reset_default_graph()
  # Verifies that depth_multiplier arg scope actually works
  # if no default min_depth is provided.
  mobilenet_v2.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 2)),
      conv_defs=mobilenet_v2.V2_DEF,
      depth_multiplier=0.01,
      finegrain_classification_mode=True)
  s = [op.outputs[0].get_shape().as_list()[-1]
       for op in find_ops('Conv2D')]
  s = set(s)
  # All convolutions will be 8->48, except for the last one.
  self.assertSameElements(s, [8, 48, 1001, 1280])
Example #24
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testDivisibleByWithArgScope(self):
  tf.reset_default_graph()
  # Verifies that depth_multiplier arg scope actually works
  # if no default min_depth is provided.
  with slim.arg_scope((mobilenet.depth_multiplier,), min_depth=32):
    mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, 224, 224, 2)),
        conv_defs=mobilenet_v2.V2_DEF,
        depth_multiplier=0.1)
    s = [op.outputs[0].get_shape().as_list()[-1]
         for op in find_ops('Conv2D')]
    s = set(s)
    self.assertSameElements(s, [32, 192, 128, 1001])
Example #25
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testWithOutputStride8(self):
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=8,
      scope='MobilenetV2')
  self.assertEqual(out.get_shape().as_list()[1:3], [28, 28])
Example #26
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testWithSplits(self):
  spec = copy.deepcopy(mobilenet_v2.V2_DEF)
  spec['overrides'] = {
      (ops.expanded_conv,): dict(split_expansion=2),
  }
  _, _ = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=spec)
  num_convs = len(find_ops('Conv2D'))
  # All but 3 ops have 3 conv operators, the remaining 3 have one,
  # and there is one unaccounted for.
  self.assertEqual(num_convs, len(spec['spec']) * 3 - 5)
Example #27
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testImageSizes(self):
  for input_size, output_size in [(224, 7), (192, 6), (160, 5),
                                  (128, 4), (96, 3)]:
    tf.reset_default_graph()
    _, ep = mobilenet_v2.mobilenet(
        tf.placeholder(tf.float32, (10, input_size, input_size, 3)))
    self.assertEqual(ep['layer_18/output'].get_shape().as_list()[1:3],
                     [output_size] * 2)
Example #28
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testCreationNoClasses(self):
  spec = copy.deepcopy(mobilenet_v2.V2_DEF)
  net, ep = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=spec,
      num_classes=None)
  self.assertIs(net, ep['global_pool'])
Example #29
Source File: mobilenet_v2_test.py From CVTron with Apache License 2.0 | 5 votes |
def testCreation(self):
  spec = dict(mobilenet_v2.V2_DEF)
  _, ep = mobilenet.mobilenet(
      tf.placeholder(tf.float32, (10, 224, 224, 16)), conv_defs=spec)
  num_convs = len(find_ops('Conv2D'))

  # This is mostly a sanity test. No deep reason for these particular
  # constants.
  #
  # All but the first 2 and the last one have two convolutions, and there is
  # one extra conv that is not in the spec (logits).
  self.assertEqual(num_convs, len(spec['spec']) * 2 - 2)

  # Check that depthwise outputs are exposed.
  for i in range(2, 17):
    self.assertIn('layer_%d/depthwise_output' % i, ep)
Example #30
Source File: mobilenet_v2_test.py From R3Det_Tensorflow with MIT License | 5 votes |
def testWithOutputStride8AndExplicitPadding(self):
  tf.reset_default_graph()
  out, _ = mobilenet.mobilenet_base(
      tf.placeholder(tf.float32, (10, 224, 224, 16)),
      conv_defs=mobilenet_v2.V2_DEF,
      output_stride=8,
      use_explicit_padding=True,
      scope='MobilenetV2')
  self.assertEqual(out.get_shape().as_list()[1:3], [28, 28])