Python tensorflow.contrib.slim.nets.resnet_v1.bottleneck() Examples

The following are 7 code examples of tensorflow.contrib.slim.nets.resnet_v1.bottleneck(), drawn from open-source projects. The project, source file, and license for each example are noted above its code. You may also want to check out the other functions and classes available in the tensorflow.contrib.slim.nets.resnet_v1 module.
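For orientation: bottleneck() builds a single ResNet v1 residual unit (a 1x1 convolution down to depth_bottleneck, a 3x3 convolution, a 1x1 convolution back up to depth, plus an identity or projection shortcut), and the examples below mostly pass it as the unit_fn of resnet_utils.Block descriptors rather than calling it directly. A minimal direct call might look like the sketch below; the input shape and parameter values are illustrative assumptions, and in practice the call is usually wrapped in slim.arg_scope(resnet_utils.resnet_arg_scope()) as the examples show.

import tensorflow as tf
from tensorflow.contrib.slim.nets import resnet_v1

# Illustrative NHWC input; the shape is an arbitrary choice for this sketch.
inputs = tf.placeholder(tf.float32, [1, 56, 56, 64])

# One bottleneck unit: 1x1 conv to depth_bottleneck, 3x3 conv (strided here),
# 1x1 conv to depth, with a matching strided projection shortcut.
net = resnet_v1.bottleneck(inputs, depth=256, depth_bottleneck=64, stride=2)
# net has shape [1, 28, 28, 256]: stride 2 halves the spatial size and the
# unit's output depth is `depth`, independent of the input depth.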
Example #1
Source File: resnet_v1_test.py    From deep_image_model with Apache License 2.0
def testEndPointsV1(self):
    """Test the end points of a tiny v1 bottleneck network."""
    bottleneck = resnet_v1.bottleneck
    # Each args tuple is (depth, depth_bottleneck, stride) for one bottleneck unit.
    blocks = [resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
              resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 1)])]
    inputs = create_test_input(2, 32, 16, 3)
    with slim.arg_scope(resnet_utils.resnet_arg_scope()):
      _, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
    expected = [
        'tiny/block1/unit_1/bottleneck_v1/shortcut',
        'tiny/block1/unit_1/bottleneck_v1/conv1',
        'tiny/block1/unit_1/bottleneck_v1/conv2',
        'tiny/block1/unit_1/bottleneck_v1/conv3',
        'tiny/block1/unit_2/bottleneck_v1/conv1',
        'tiny/block1/unit_2/bottleneck_v1/conv2',
        'tiny/block1/unit_2/bottleneck_v1/conv3',
        'tiny/block2/unit_1/bottleneck_v1/shortcut',
        'tiny/block2/unit_1/bottleneck_v1/conv1',
        'tiny/block2/unit_1/bottleneck_v1/conv2',
        'tiny/block2/unit_1/bottleneck_v1/conv3',
        'tiny/block2/unit_2/bottleneck_v1/conv1',
        'tiny/block2/unit_2/bottleneck_v1/conv2',
        'tiny/block2/unit_2/bottleneck_v1/conv3']
    self.assertItemsEqual(expected, end_points) 
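In this test, each (depth, depth_bottleneck, stride) tuple in a Block's args list describes one bottleneck unit, which is why the expected end points follow the pattern tiny/<block>/unit_<n>/bottleneck_v1/{shortcut, conv1, conv2, conv3}, with a shortcut entry only where the unit needs a projection. create_test_input() and _resnet_plain() are helpers defined elsewhere in resnet_v1_test.py and are not shown here; a rough, hypothetical stand-in for the input helper could look like the sketch below (the actual helper may construct the values differently).

import numpy as np
import tensorflow as tf

def create_test_input_sketch(batch_size, height, width, channels):
  # Hypothetical stand-in for the test file's create_test_input helper:
  # return a placeholder when any dimension is unknown, otherwise a small
  # deterministic constant tensor of the requested NHWC shape.
  if None in [batch_size, height, width, channels]:
    return tf.placeholder(tf.float32, (batch_size, height, width, channels))
  values = np.reshape(np.arange(height), [height, 1]) + np.arange(width)
  values = np.tile(np.reshape(values, [1, height, width, 1]),
                   [batch_size, 1, 1, channels])
  return tf.constant(values, dtype=tf.float32)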
Example #2
Source File: resnet_v1_test.py    From deep_image_model with Apache License 2.0
def _resnet_small(self,
                    inputs,
                    num_classes=None,
                    global_pool=True,
                    output_stride=None,
                    include_root_block=True,
                    reuse=None,
                    scope='resnet_v1_small'):
    """A shallow and thin ResNet v1 for faster tests."""
    bottleneck = resnet_v1.bottleneck
    blocks = [
        resnet_utils.Block(
            'block1', bottleneck, [(4, 1, 1)] * 2 + [(4, 1, 2)]),
        resnet_utils.Block(
            'block2', bottleneck, [(8, 2, 1)] * 2 + [(8, 2, 2)]),
        resnet_utils.Block(
            'block3', bottleneck, [(16, 4, 1)] * 2 + [(16, 4, 2)]),
        resnet_utils.Block(
            'block4', bottleneck, [(32, 8, 1)] * 2)]
    return resnet_v1.resnet_v1(inputs, blocks, num_classes, global_pool,
                               output_stride, include_root_block, reuse, scope) 
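A sketch of how a blocks list like the one above turns into logits and end points. The input shape, class count, and scope name are illustrative assumptions; keyword arguments are used because the positional signature of resnet_v1.resnet_v1 differs slightly across TensorFlow releases, and the tuple-style Block args assume the same resnet_utils variant the test above uses (newer copies expect dict-style args, as in Example #3).

import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib.slim.nets import resnet_v1
# Import path for resnet_utils assumes the TF 1.x contrib layout; the projects
# above bundle their own copies.
from tensorflow.contrib.slim.python.slim.nets import resnet_utils

bottleneck = resnet_v1.bottleneck
blocks = [
    resnet_utils.Block('block1', bottleneck, [(4, 1, 1)] * 2 + [(4, 1, 2)]),
    resnet_utils.Block('block2', bottleneck, [(8, 2, 1)] * 2)]

inputs = tf.placeholder(tf.float32, [2, 64, 64, 3])  # illustrative shape
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
  net, end_points = resnet_v1.resnet_v1(
      inputs, blocks, num_classes=10, global_pool=True,
      include_root_block=True, scope='resnet_v1_tiny')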
Example #3
Source File: truncated_base_network.py    From Table-Detection-using-Deep-learning with BSD 3-Clause "New" or "Revised" License
def _build_tail(self, inputs, is_training=False):
        if not self._use_tail:
            return inputs

        if self._architecture == 'resnet_v1_101':
            train_batch_norm = (
                is_training and self._config.get('train_batch_norm')
            )
            with self._enter_variable_scope():
                weight_decay = (
                    self._config.get('arg_scope', {}).get('weight_decay', 0)
                )
                with tf.variable_scope(self._architecture, reuse=True):
                    resnet_arg_scope = resnet_utils.resnet_arg_scope(
                            batch_norm_epsilon=1e-5,
                            batch_norm_scale=True,
                            weight_decay=weight_decay
                        )
                    with slim.arg_scope(resnet_arg_scope):
                        with slim.arg_scope(
                            [slim.batch_norm], is_training=train_batch_norm
                        ):
                            blocks = [
                                resnet_utils.Block(
                                    'block4',
                                    resnet_v1.bottleneck,
                                    [{
                                        'depth': 2048,
                                        'depth_bottleneck': 512,
                                        'stride': 1
                                    }] * 3
                                )
                            ]
                            proposal_classifier_features = (
                                resnet_utils.stack_blocks_dense(inputs, blocks)
                            )
        else:
            proposal_classifier_features = inputs

        return proposal_classifier_features 
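Here the Block args are dicts rather than (depth, depth_bottleneck, stride) tuples; the keys mirror bottleneck()'s keyword arguments, and the resnet_utils variant these projects rely on forwards each dict to the unit function as keyword arguments (older copies of resnet_utils, as used in Examples #1, #2 and #7, expect the tuple form instead). Applying a single unit by hand illustrates the mapping; the input tensor below is an illustrative assumption, and stack_blocks_dense additionally handles unit scoping, atrous rates, and output_stride bookkeeping.

import tensorflow as tf
from tensorflow.contrib.slim.nets import resnet_v1

# One of the three identical block4 unit configurations built above.
unit = {'depth': 2048, 'depth_bottleneck': 512, 'stride': 1}

net = tf.placeholder(tf.float32, [1, 14, 14, 1024])  # illustrative feature map
net = resnet_v1.bottleneck(net, **unit)  # -> shape [1, 14, 14, 2048]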
Example #4
Source File: resnet_v1_test.py    From deep_image_model with Apache License 2.0
def testAtrousValuesBottleneck(self):
    self._atrousValues(resnet_v1.bottleneck) 
Example #5
Source File: truncated_base_network.py    From Tabulo with BSD 3-Clause "New" or "Revised" License (identical to Example #3)
def _build_tail(self, inputs, is_training=False):
        if not self._use_tail:
            return inputs

        if self._architecture == 'resnet_v1_101':
            train_batch_norm = (
                is_training and self._config.get('train_batch_norm')
            )
            with self._enter_variable_scope():
                weight_decay = (
                    self._config.get('arg_scope', {}).get('weight_decay', 0)
                )
                with tf.variable_scope(self._architecture, reuse=True):
                    resnet_arg_scope = resnet_utils.resnet_arg_scope(
                            batch_norm_epsilon=1e-5,
                            batch_norm_scale=True,
                            weight_decay=weight_decay
                        )
                    with slim.arg_scope(resnet_arg_scope):
                        with slim.arg_scope(
                            [slim.batch_norm], is_training=train_batch_norm
                        ):
                            blocks = [
                                resnet_utils.Block(
                                    'block4',
                                    resnet_v1.bottleneck,
                                    [{
                                        'depth': 2048,
                                        'depth_bottleneck': 512,
                                        'stride': 1
                                    }] * 3
                                )
                            ]
                            proposal_classifier_features = (
                                resnet_utils.stack_blocks_dense(inputs, blocks)
                            )
        else:
            proposal_classifier_features = inputs

        return proposal_classifier_features 
Example #6
Source File: truncated_base_network.py    From luminoth with BSD 3-Clause "New" or "Revised" License (identical to Example #3)
def _build_tail(self, inputs, is_training=False):
        if not self._use_tail:
            return inputs

        if self._architecture == 'resnet_v1_101':
            train_batch_norm = (
                is_training and self._config.get('train_batch_norm')
            )
            with self._enter_variable_scope():
                weight_decay = (
                    self._config.get('arg_scope', {}).get('weight_decay', 0)
                )
                with tf.variable_scope(self._architecture, reuse=True):
                    resnet_arg_scope = resnet_utils.resnet_arg_scope(
                            batch_norm_epsilon=1e-5,
                            batch_norm_scale=True,
                            weight_decay=weight_decay
                        )
                    with slim.arg_scope(resnet_arg_scope):
                        with slim.arg_scope(
                            [slim.batch_norm], is_training=train_batch_norm
                        ):
                            blocks = [
                                resnet_utils.Block(
                                    'block4',
                                    resnet_v1.bottleneck,
                                    [{
                                        'depth': 2048,
                                        'depth_bottleneck': 512,
                                        'stride': 1
                                    }] * 3
                                )
                            ]
                            proposal_classifier_features = (
                                resnet_utils.stack_blocks_dense(inputs, blocks)
                            )
        else:
            proposal_classifier_features = inputs

        return proposal_classifier_features 
Example #7
Source File: resnet_v1_test.py    From deep_image_model with Apache License 2.0
def _atrousValues(self, bottleneck):
    """Verify the values of dense feature extraction by atrous convolution.

    Make sure that dense feature extraction by stack_blocks_dense() followed by
    subsampling gives identical results to feature extraction at the nominal
    network output stride using the simple self._stack_blocks_nondense() above.

    Args:
      bottleneck: The bottleneck function.
    """
    blocks = [
        resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
        resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 2)]),
        resnet_utils.Block('block3', bottleneck, [(16, 4, 1), (16, 4, 2)]),
        resnet_utils.Block('block4', bottleneck, [(32, 8, 1), (32, 8, 1)])
    ]
    nominal_stride = 8

    # Test both odd and even input dimensions.
    height = 30
    width = 31
    with slim.arg_scope(resnet_utils.resnet_arg_scope(is_training=False)):
      for output_stride in [1, 2, 4, 8, None]:
        with tf.Graph().as_default():
          with self.test_session() as sess:
            tf.set_random_seed(0)
            inputs = create_test_input(1, height, width, 3)
            # Dense feature extraction followed by subsampling.
            output = resnet_utils.stack_blocks_dense(inputs,
                                                     blocks,
                                                     output_stride)
            if output_stride is None:
              factor = 1
            else:
              factor = nominal_stride // output_stride

            output = resnet_utils.subsample(output, factor)
            # Make the two networks use the same weights.
            tf.get_variable_scope().reuse_variables()
            # Feature extraction at the nominal network rate.
            expected = self._stack_blocks_nondense(inputs, blocks)
            sess.run(tf.global_variables_initializer())
            output, expected = sess.run([output, expected])
            self.assertAllClose(output, expected, atol=1e-4, rtol=1e-4)
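The comparison relies on resnet_utils.subsample() to bring the dense (atrous) output back to the nominal output stride before checking it against the strided network. In the slim implementation this is essentially a strided identity; a rough paraphrase, not the exact library code:

from tensorflow.contrib import slim

def subsample_sketch(inputs, factor, scope=None):
  # Rough equivalent of resnet_utils.subsample: keep every factor-th activation.
  if factor == 1:
    return inputs
  # A 1x1 max pool with stride=factor simply picks one value per stride window.
  return slim.max_pool2d(inputs, [1, 1], stride=factor, scope=scope)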