Python mxnet.gluon.nn.HybridBlock() Examples

The following are 30 code examples of mxnet.gluon.nn.HybridBlock(), drawn from open-source projects. The source file, project, and license for each example are noted above it. You may also want to check out all available functions and classes of the module mxnet.gluon.nn.
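Every example below follows the same pattern: subclass gluon.HybridBlock (or nn.HybridBlock), register child layers in __init__, and implement hybrid_forward(self, F, x), where F is mx.nd in imperative mode and mx.sym once the block is hybridized. A minimal, self-contained sketch of that pattern (TinyNet is a made-up name, not taken from any of the projects below):

import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn

class TinyNet(gluon.HybridBlock):
    def __init__(self, **kwargs):
        super(TinyNet, self).__init__(**kwargs)
        with self.name_scope():
            self.conv = nn.Conv2D(16, (3, 3))
            self.dense = nn.Dense(10)

    def hybrid_forward(self, F, x):
        # F is mx.nd in imperative mode and mx.sym after hybridize()
        return self.dense(F.relu(self.conv(x)))

net = TinyNet()
net.initialize()
net.hybridize()  # compile to a static graph on the first forward call
out = net(mx.nd.random.uniform(shape=(4, 3, 32, 32)))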
Example #1
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_activation_slice_activation():
    class Net(gluon.HybridBlock):
        def __init__(self, act0, act1, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.slice = slice
                self.act0 = nn.Activation(act0)
                self.act1 = nn.Activation(act1)

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1])
            y = self.act0(x_slice)
            y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1])
            out = self.act1(y_slice)
            return out
    acts = ["relu", "sigmoid", "tanh", "softrelu"]
    for idx0, act0 in enumerate(acts):
        for idx1, act1 in enumerate(acts):
            if idx1 == idx0:
                continue
            x = mx.nd.random.uniform(-1, 1, shape=(8, 32, 64, 64))
            slice = [[(0, 16, 32, 32), (4, 32, 64, 64)], [(2, 0, 16, 16), (4, 16, 32, 32)]]
            net = Net(act0, act1, slice)
            check_layer_forward_withinput(net, x) 
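Note that these tests all defer the actual verification to check_layer_forward_withinput, which is defined elsewhere in test_gluon.py and not reproduced on this page. Based on how it is called, it presumably runs the block both imperatively and after hybridize() and compares outputs and input gradients. A rough sketch of such a helper, as an illustration only and not the project's actual code:

import mxnet as mx

def check_layer_forward_withinput_sketch(net, x):
    # Hypothetical stand-in for the helper used by these tests.
    x_hybrid = x.copy()
    x.attach_grad()
    x_hybrid.attach_grad()
    net.collect_params().initialize()   # assumes a freshly constructed net
    with mx.autograd.record():
        out1 = net(x)
    out1.backward()
    net.hybridize()
    with mx.autograd.record():
        out2 = net(x_hybrid)
    out2.backward()
    mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-5)
    mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-5)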
Example #2
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_dense():
    class Net(gluon.HybridBlock):
        def __init__(self, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                channel0 = np.random.randint(1, 17)
                self.dense0 = nn.Dense(channel0)

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape((8, 64, 128, -1))
            out = self.dense0(x_reshape)
            return out

    x = mx.nd.random.uniform(shape=(4, 32, 64, 64))
    net = Net()
    check_layer_forward_withinput(net, x) 
Example #3
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_conv_reshape_conv():
    class Net(gluon.HybridBlock):
        def __init__(self, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(64, (3, 3))
                self.conv1 = nn.Conv2D(128, (3, 3))

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=(0, 0, 1, 1), end=(4, 16, 33, 33))
            y = self.conv0(x_slice)
            "shape of y is (4, 64, 30, 30)"
            y_reshape = y.reshape((0, 0, 60, 15))
            out = self.conv1(y_reshape)
            return out

    x = mx.nd.random.uniform(shape=(4, 32, 64, 64))
    net = Net()
    check_layer_forward_withinput(net, x) 
Example #4
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_dense():
    class Net(gluon.HybridBlock):
        def __init__(self, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                channel0 = np.random.randint(1, 17)
                self.dense0 = nn.Dense(channel0)
                self.slice = slice

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=tuple(self.slice[0]),
                              end=tuple(self.slice[1]))
            out = self.dense0(x_slice)
            return out

    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
    slice = [[0, 16, 0, 0], [4, 32, 32, 32]]
    net = Net(slice)
    check_layer_forward_withinput(net, x) 
Example #5
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_conv_slice_conv():
    class Net(gluon.HybridBlock):
        def __init__(self, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(32, (3, 3))
                self.conv1 = nn.Conv2D(16, (1, 1))

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=(0, 0, 0, 0), end=(4, 16, 16, 16))
            y = self.conv0(x_slice)
            "shape of y is (4, 32, 14, 14)"
            y_slice = y.slice(begin=(0, 0, 0, 0), end=(4, 16, 3, 3))
            out = self.conv1(y_slice)
            return out
    x = mx.nd.random.uniform(shape=(4, 32, 32, 32))
    net = Net()
    check_layer_forward_withinput(net, x) 
Example #6
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_conv_reshape_conv():
    class Net(gluon.HybridBlock):
        def __init__(self, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(64, (3, 3))
                self.conv1 = nn.Conv2D(128, (3, 3))

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape((0, 0, 128, 32))
            y = self.conv0(x_reshape)
            "spatial shape of y is (62, 62)"
            y_reshape = y.reshape((0, 0, 124, 31))
            out = self.conv1(y_reshape)
            return out
    x = mx.nd.random.uniform(shape=(4, 3, 64, 64))
    net = Net()
    check_layer_forward_withinput(net, x) 
Example #7
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_conv2d_16c():
    chn_list = [16, 256]
    kernel_list = [1, 3]
    kernel_list.append(224)
    batch_size = 4
    class Net(gluon.HybridBlock):
        def __init__(self,
                     chn_num,
                     kernel,
                     **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))

        def hybrid_forward(self, F, x):
            out = self.conv0(x)
            return out

    x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224))
    for i in range(len(chn_list)):
        for j in range(len(kernel_list)):
            net = Net(chn_list[i], kernel_list[j])
            check_layer_forward_withinput(net, x) 
Example #8
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_deconv2d_16c():
    in_chn_list = [1024, 512, 256, 128, 64, 32, 16]
    out_chn_list = [512, 256, 128, 64, 32, 16, 3]
    kernel_list = [1, 3, 5, 7]
    in_shape = [4, 8, 16, 32, 64, 224]
    batch_size = 4
    class Net(gluon.HybridBlock):
        def __init__(self, chn_num, kernel, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel))

        def hybrid_forward(self, F, x):
            out = self.deconv0(x)
            return out
    for i in range(len(in_shape)):
        x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i]))
        for j in range(len(kernel_list)):
            net = Net(out_chn_list[i], kernel_list[j])
            check_layer_forward_withinput(net, x) 
Example #9
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_dense_slice_dense():
    class Net(gluon.HybridBlock):
        def __init__(self, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                channel0 = 32
                channel1 = np.random.randint(1, 17)
                self.dense0 = nn.Dense(channel0)
                self.dense1 = nn.Dense(channel1)
                self.slice = slice

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
            y = self.dense0(x_slice)
            y_slice = y.slice(begin=(1, 0), end=(3, 10))
            out = self.dense1(y_slice)
            return out

    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
    slice = [[0, 16, 0, 0], [4, 32, 32, 32]]
    net = Net(slice)
    check_layer_forward_withinput(net, x) 
Example #10
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_dense_reshape_dense():
    class Net(gluon.HybridBlock):
        def __init__(self, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                channel0 = np.random.randint(1, 17)
                channel1 = np.random.randint(1, 33)
                self.dense0 = nn.Dense(channel0)
                self.dense1 = nn.Dense(channel1)

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape((4, 16, 128, 32))
            y = self.dense0(x_reshape)
            y_reshape = y.reshape((1, -1))
            out = self.dense1(y_reshape)
            return out

    x = mx.nd.random.uniform(shape=(4, 16, 64, 64))
    net = Net()
    check_layer_forward_withinput(net, x) 
Example #11
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_batchnorm():
    class Net(gluon.HybridBlock):
        def __init__(self, shape, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(96, (1, 1))
                self.bn0 = nn.BatchNorm()
                self.reshape = shape

        def hybrid_forward(self, F, x):
            x_in = self.conv0(x)
            x_reshape = x_in.reshape(self.reshape)
            out = self.bn0(x_reshape)
            return out

    x = mx.nd.random.uniform(shape=(4, 32, 64, 64))
    shape = (4, 64, 64, -1)
    net = Net(shape)
    check_layer_forward_withinput(net, x) 
Example #12
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_batchnorm():
    class Net(gluon.HybridBlock):
        def __init__(self, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(128, (1, 1))
                self.bn0 = nn.BatchNorm()
                self.slice = slice

        def hybrid_forward(self, F, x):
            x_in = self.conv0(x)
            x_slice = x_in.slice(begin=tuple(self.slice[0]),
                                 end=tuple(self.slice[1]))
            out = self.bn0(x_slice)
            return out

    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
    slice = [[0, 0, 0, 0], [4, 32, 32, 32]]
    net = Net(slice)
    check_layer_forward_withinput(net, x) 
Example #13
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_batchnorm_slice_batchnorm():
    class Net(gluon.HybridBlock):
        def __init__(self, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(128, (1, 1))
                self.bn0 = nn.BatchNorm()
                self.bn1 = nn.BatchNorm()
                self.slice = slice

        def hybrid_forward(self, F, x):
            x_in = self.conv0(x)
            x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1]))
            y = self.bn0(x_slice)
            y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1]))
            out = self.bn1(y_slice)
            return out

    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
    slice = [[[0, 0, 0, 0], [4, 32, 32, 32]], [[0, 0, 0, 0], [2, 64, 16, 16]]]
    net = Net(slice)
    check_layer_forward_withinput(net, x) 
Example #14
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_batchnorm_reshape_batchnorm():
    class Net(gluon.HybridBlock):
        def __init__(self, shape, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(128, (1, 1))
                self.bn0 = nn.BatchNorm()
                self.bn1 = nn.BatchNorm()
                self.reshape = shape
                self.slice = slice

        def hybrid_forward(self, F, x):
            x_in = self.conv0(x)
            x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
            y = self.bn0(x_slice)
            y_reshape = y.reshape(self.reshape)
            out = self.bn1(y_reshape)
            return out

    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
    slice = [[0, 0, 0, 0], [4, 32, 32, 32]]
    shape = (1, 128, 64, -1)
    net = Net(shape, slice)
    check_layer_forward_withinput(net, x) 
Example #15
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_dense_reshape_dense():
    class Net(gluon.HybridBlock):
        def __init__(self, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                channel0 = np.random.randint(1, 17)
                channel1 = np.random.randint(1, 17)
                self.dense0 = nn.Dense(channel0)
                self.dense1 = nn.Dense(channel1)
                self.slice = slice

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
            y = self.dense0(x_slice)
            y_reshape = y.reshape((1, -1))
            out = self.dense1(y_reshape)
            return out

    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
    slice = [[0, 16, 0, 0], [4, 32, 32, 32]]
    net = Net(slice)
    check_layer_forward_withinput(net, x) 
Example #16
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_activation_reshape_activation():
    class Net(gluon.HybridBlock):
        def __init__(self, act0, act1, shape, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.reshape = shape
                self.act0 = nn.Activation(act0)
                self.act1 = nn.Activation(act1)

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape(self.reshape[0])
            y = self.act0(x_reshape)
            y_reshape = y.reshape(self.reshape[1])
            out = self.act1(y_reshape)
            return out
    acts = ["relu", "sigmoid", "tanh", "softrelu"]
    for idx0, act0 in enumerate(acts):
        for idx1, act1 in enumerate(acts):
            if idx1 == idx0:
                continue
            x = mx.nd.random.uniform(-1, 1, shape=(4, 16, 32, 32))
            shape = [(4, 32, 32, -1), (4, 32, 16, -1)]
            net = Net(act0, act1, shape)
            check_layer_forward_withinput(net, x) 
Example #17
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_activation():
    class Net(gluon.HybridBlock):
        def __init__(self, act, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.slice = slice
                self.act = nn.Activation(act)

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
            out = self.act(x_slice)
            return out

    acts = ["relu", "sigmoid", "tanh", "softrelu"]
    for act in acts:
        x = mx.nd.random.uniform(-1, 1, shape=(8, 32, 64, 64))
        slice = [(0, 16, 32, 32), (4, 32, 64, 64)]
        net = Net(act, slice)
        check_layer_forward_withinput(net, x) 
Example #18
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_activation():
    class Net(gluon.HybridBlock):
        def __init__(self, act, shape, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.reshape = shape
                self.act = nn.Activation(act)

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape(self.reshape)
            out = self.act(x_reshape)
            return out
    acts = ["relu", "sigmoid", "tanh", "softrelu"]
    for act in acts:
        x = mx.nd.random.uniform(-1, 1, shape=(4, 16, 32, 32))
        shape = (4, 32, 32, -1)
        net = Net(act, shape)
        check_layer_forward_withinput(net, x) 
Example #19
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_deconv_slice_deconv():
    class Net(gluon.HybridBlock):
        def __init__(self, shape, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.reshape = shape
                self.slice = slice
                self.conv0 = nn.Conv2DTranspose(32, (3, 3))
                self.conv1 = nn.Conv2DTranspose(64, (3, 3), strides=(2, 2))

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape(self.reshape)
            y = self.conv0(x_reshape)
            "shape of y is (4, 32, 66, 18)"
            y_slice = y.slice(begin=self.slice[0], end=self.slice[1])
            out = self.conv1(y_slice)
            return out
    x = mx.nd.random.uniform(shape=(4, 16, 32, 32))
    shape = (4, 16, 64, -1)
    slice = [(0, 0, 0, 0), (2, 16, 16, 16)]
    net = Net(shape, slice)
    check_layer_forward_withinput(net, x) 
Example #20
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_deconv_slice_deconv():
    class Net(gluon.HybridBlock):
        def __init__(self, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.slice = slice
                self.conv0 = nn.Conv2DTranspose(32, (3, 3))
                self.conv1 = nn.Conv2DTranspose(64, (3, 3), strides=(2, 2))

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1])
            y = self.conv0(x_slice)
            "shape of y is (4, 32, 66, 18)"
            y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1])
            out = self.conv1(y_slice)
            return out
    x = mx.nd.random.uniform(shape=(8, 32, 64, 64))
    slice = [[(0, 0, 0, 0), (4, 16, 32, 32)], [(0, 0, 0, 0), (2, 16, 16, 16)]]
    net = Net(slice)
    check_layer_forward_withinput(net, x) 
Example #21
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_deconv_reshape_deconv():
    class Net(gluon.HybridBlock):
        def __init__(self, shape, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.reshape = shape
                self.conv0 = nn.Conv2DTranspose(32, (3, 3))
                self.conv1 = nn.Conv2DTranspose(64, (3, 3), strides=(2, 2))

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape(self.reshape[0])
            y = self.conv0(x_reshape)
            "shape of y is (4, 32, 66, 18)"
            y_reshape = y.reshape(self.reshape[1])
            out = self.conv1(y_reshape)
            return out
    x = mx.nd.random.uniform(shape=(4, 16, 32, 32))
    shape = [(4, 16, 64, -1), (4, 32, 33, -1)]
    net = Net(shape)
    check_layer_forward_withinput(net, x) 
Example #22
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_slice_deconv():
    class Net(gluon.HybridBlock):
        def __init__(self, slice, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.slice = slice
                self.conv0 = nn.Conv2DTranspose(64, (3, 3))

        def hybrid_forward(self, F, x):
            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
            out = self.conv0(x_slice)
            return out
    x = mx.nd.random.uniform(shape=(8, 32, 64, 64))
    slice = [(0, 16, 0, 0), (4, 32, 32, 32)]
    net = Net(slice)
    check_layer_forward_withinput(net, x) 
Example #23
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_deconv():
    class Net(gluon.HybridBlock):
        def __init__(self, shape, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.reshape = shape
                self.conv0 = nn.Conv2DTranspose(64, (3, 3))

        def hybrid_forward(self, F, x):
            x_reshape = x.reshape(self.reshape)
            out = self.conv0(x_reshape)
            return out
    x = mx.nd.random.uniform(shape=(4, 16, 32, 32))
    shape = (4, 16, 64, -1)
    net = Net(shape)
    check_layer_forward_withinput(net, x) 
Example #24
Source File: duc_mobilenet.py    From gluon-cv with Apache License 2.0
def get_duc_mobilenet(base_network, pretrained=False, ctx=cpu(), **kwargs):
    """Get mobilenet with duc upsampling layers.

    Parameters
    ----------
    base_network : str
        Name of the base feature extraction network.
    pretrained : bool
        Whether to load the pretrained base network.
    ctx : mxnet.Context
        mx.cpu() or mx.gpu()

    Returns
    -------
    nn.HybridBlock
        Network instance of mobilenet with duc upsampling layers

    """
    net = DUCMobilenet(base_network=base_network, pretrained_base=pretrained, **kwargs)
    with warnings.catch_warnings(record=True) as _:
        warnings.simplefilter("always")
        net.initialize()
    net.collect_params().reset_ctx(ctx)
    return net 
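A factory like this is typically called with the name of a base feature extractor and then used like any other HybridBlock. A hedged usage sketch follows; the base_network string and the input size are illustrative assumptions, not checked against gluon-cv's model registry:

import mxnet as mx

# Hypothetical usage of the factory above; 'mobilenetv3_small' is an assumed registry name.
net = get_duc_mobilenet('mobilenetv3_small', pretrained=True, ctx=mx.cpu())
net.hybridize()
out = net(mx.nd.random.uniform(shape=(1, 3, 512, 512)))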
Example #25
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_constant():
    class Test(gluon.HybridBlock):
        def __init__(self, **kwargs):
            super(Test, self).__init__(**kwargs)
            self.value = np.asarray([[1,2], [3,4]])
            self.const = self.params.get_constant('const', self.value)

        def hybrid_forward(self, F, x, const):
            return x + const

    test = Test()
    test.initialize()
    trainer = gluon.Trainer(test.collect_params(), 'sgd',
                            {'learning_rate': 1.0, 'momentum': 0.5})

    with mx.autograd.record():
        x = mx.nd.ones((2,2))
        x.attach_grad()
        y = test(x)
        y.backward()

    trainer.step(1)

    assert (test.const.data().asnumpy() == test.value).all()
    assert (x.grad.asnumpy() == 1).all() 
Example #26
Source File: test_gluon.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def test_reshape_batchnorm_reshape_batchnorm():
    class Net(gluon.HybridBlock):
        def __init__(self, shape, **kwargs):
            super(Net, self).__init__(**kwargs)
            with self.name_scope():
                self.conv0 = nn.Conv2D(128, (1, 1))
                self.bn0 = nn.BatchNorm()
                self.bn1 = nn.BatchNorm()
                self.reshape = shape

        def hybrid_forward(self, F, x):
            x_in = self.conv0(x)
            x_reshape = x_in.reshape(self.reshape[0])
            y = self.bn0(x_reshape)
            y_reshape = y.reshape(self.reshape[1])
            out = self.bn1(y_reshape)
            return out

    x = mx.nd.random.uniform(shape=(4, 32, 64, 64))
    shape = [(4, 64, 64, -1), (4, 128, -1, 32)]
    net = Net(shape)
    check_layer_forward_withinput(net, x) 
Example #27
Source File: center_net.py    From gluon-cv with Apache License 2.0
def center_net_mobilenetv3_small_duc_voc(pretrained=False, pretrained_base=True, **kwargs):
    """Center net with mobilenetv3_small base network with DUC layers on voc dataset.

    Parameters
    ----------
    classes : iterable of str
        Names of custom foreground classes. `len(classes)` is the number of foreground classes.
    pretrained_base : bool or str, optional, default is True
        Load the pretrained base network; the extra layers are randomized.

    Returns
    -------
    HybridBlock
        A CenterNet detection network.

    """
    from .duc_mobilenet import mobilenetv3_small_duc
    from ...data import VOCDetection
    classes = VOCDetection.CLASSES
    pretrained_base = False if pretrained else pretrained_base
    base_network = mobilenetv3_small_duc(pretrained=pretrained_base, **kwargs)
    heads = OrderedDict([
        ('heatmap', {'num_output': len(classes), 'bias': -2.19}), # use bias = -log((1 - 0.1) / 0.1)
        ('wh', {'num_output': 2}),
        ('reg', {'num_output': 2})
        ])
    return get_center_net('mobilenetv3_small_duc', 'voc', base_network=base_network, heads=heads,
                          head_conv_channel=64, pretrained=pretrained, classes=classes,
                          scale=4.0, topk=40, **kwargs) 
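The remaining center_net_* factories on this page follow the same recipe: choose a base network, build the OrderedDict of heads (heatmap, wh, reg), and delegate to get_center_net. A hedged usage sketch; the output unpacking assumes the usual gluon-cv detector convention of (class ids, scores, bounding boxes) at inference time:

import mxnet as mx

# Hypothetical usage; pretrained weights may not be published for every variant.
net = center_net_mobilenetv3_small_duc_voc(pretrained_base=True)
net.hybridize()
x = mx.nd.random.uniform(shape=(1, 3, 512, 512))
ids, scores, bboxes = net(x)  # assumed (class ids, scores, boxes) output, per gluon-cv detectors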
Example #28
Source File: center_net.py    From gluon-cv with Apache License 2.0
def center_net_dla34_voc(pretrained=False, pretrained_base=True, **kwargs):
    """Center net with dla34 base network on voc dataset.

    Parameters
    ----------
    classes : iterable of str
        Names of custom foreground classes. `len(classes)` is the number of foreground classes.
    pretrained_base : bool or str, optional, default is True
        Load the pretrained base network; the extra layers are randomized.

    Returns
    -------
    HybridBlock
        A CenterNet detection network.

    """
    from .deconv_dla import dla34_deconv
    from ...data import VOCDetection
    classes = VOCDetection.CLASSES
    pretrained_base = False if pretrained else pretrained_base
    base_network = dla34_deconv(pretrained=pretrained_base, **kwargs)
    heads = OrderedDict([
        ('heatmap', {'num_output': len(classes), 'bias': -2.19}), # use bias = -log((1 - 0.1) / 0.1)
        ('wh', {'num_output': 2}),
        ('reg', {'num_output': 2})
        ])
    return get_center_net('dla34', 'voc', base_network=base_network, heads=heads,
                          head_conv_channel=64, pretrained=pretrained, classes=classes,
                          scale=4.0, topk=40, **kwargs) 
Example #29
Source File: center_net.py    From gluon-cv with Apache License 2.0
def center_net_mobilenetv3_large_duc_coco(pretrained=False, pretrained_base=True, **kwargs):
    """Center net with mobilenetv3_large base network on coco dataset.

    Parameters
    ----------
    classes : iterable of str
        Names of custom foreground classes. `len(classes)` is the number of foreground classes.
    pretrained_base : bool or str, optional, default is True
        Load the pretrained base network; the extra layers are randomized.

    Returns
    -------
    HybridBlock
        A CenterNet detection network.

    """
    from .duc_mobilenet import mobilenetv3_large_duc
    from ...data import COCODetection
    classes = COCODetection.CLASSES
    pretrained_base = False if pretrained else pretrained_base
    base_network = mobilenetv3_large_duc(pretrained=pretrained_base, **kwargs)
    heads = OrderedDict([
        ('heatmap', {'num_output': len(classes), 'bias': -2.19}), # use bias = -log((1 - 0.1) / 0.1)
        ('wh', {'num_output': 2}),
        ('reg', {'num_output': 2})
        ])
    return get_center_net('mobilenetv3_large_duc', 'coco', base_network=base_network, heads=heads,
                          head_conv_channel=64, pretrained=pretrained, classes=classes,
                          scale=4.0, topk=100, **kwargs) 
Example #30
Source File: center_net.py    From gluon-cv with Apache License 2.0
def center_net_mobilenetv3_large_duc_voc(pretrained=False, pretrained_base=True, **kwargs):
    """Center net with mobilenetv3_large base network on voc dataset.

    Parameters
    ----------
    classes : iterable of str
        Names of custom foreground classes. `len(classes)` is the number of foreground classes.
    pretrained_base : bool or str, optional, default is True
        Load the pretrained base network; the extra layers are randomized.

    Returns
    -------
    HybridBlock
        A CenterNet detection network.

    """
    from .duc_mobilenet import mobilenetv3_large_duc
    from ...data import VOCDetection
    classes = VOCDetection.CLASSES
    pretrained_base = False if pretrained else pretrained_base
    base_network = mobilenetv3_large_duc(pretrained=pretrained_base, **kwargs)
    heads = OrderedDict([
        ('heatmap', {'num_output': len(classes), 'bias': -2.19}), # use bias = -log((1 - 0.1) / 0.1)
        ('wh', {'num_output': 2}),
        ('reg', {'num_output': 2})
        ])
    return get_center_net('mobilenetv3_large_duc', 'voc', base_network=base_network, heads=heads,
                          head_conv_channel=64, pretrained=pretrained, classes=classes,
                          scale=4.0, topk=40, **kwargs)