Python chainer.initializers.Zero() Examples

The following are 18 code examples of chainer.initializers.Zero(), drawn from open-source projects. Each example notes its original source file and project. You may also want to check out the other available functions and classes of the chainer.initializers module.
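Before the examples, here is a minimal sketch of the two common usage patterns: generating a zero-filled array directly, and deferring a Parameter's shape until later. The shapes below are illustrative.

import numpy as np
import chainer
from chainer import initializers

# Zero() fills an array with zeros; dtype is optional and falls back to
# Chainer's default float type when omitted.
init = initializers.Zero(dtype=np.float32)
arr = initializers.generate_array(init, (2, 3), np)
assert (arr == 0).all() and arr.dtype == np.float32

# Passed to a Parameter, the initializer is applied lazily once the
# shape is known.
p = chainer.Parameter(init)
p.initialize((4,))
assert (p.array == 0).all()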
Example #1
Source File: transform_net.py    From chainer-pointnet with MIT License
def __init__(self, k=3, use_bn=True, residual=False):
        super(TransformModule, self).__init__()
        initial_bias = numpy.identity(k, dtype=numpy.float32).ravel()
        with self.init_scope():
            self.conv_block1 = ConvBlock(k, 64, ksize=1, use_bn=use_bn,
                                         residual=residual)
            self.conv_block2 = ConvBlock(64, 128, ksize=1, use_bn=use_bn,
                                         residual=residual)
            self.conv_block3 = ConvBlock(128, 1024, ksize=1, use_bn=use_bn,
                                         residual=residual)
            # [Note]
            # The original paper uses BN for the fc layers as well:
            # https://github.com/charlesq34/pointnet/blob/master/models/transform_nets.py#L34
            # This Chainer implementation skips BN for the fc layers.
            self.fc4 = links.Linear(1024, 512)
            # self.bn4 = links.BatchNormalization(512)
            self.fc5 = links.Linear(512, 256)
            # self.bn5 = links.BatchNormalization(256)

            # initial output of transform net should be identity
            self.fc6 = links.Linear(
                256, k * k, initialW=initializers.Zero(dtype=numpy.float32),
                initial_bias=initial_bias)
        self.k = k 
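As the comment above notes, the transform net should initially output the identity. A quick numpy check of why this works: with a zero weight matrix, fc6's output is exactly its bias, i.e. the flattened k-by-k identity (the input width of 256 matches fc5's output; the batch size is illustrative).

import numpy as np

k = 3
initial_bias = np.identity(k, dtype=np.float32).ravel()
x = np.random.randn(1, 256).astype(np.float32)
W = np.zeros((k * k, 256), dtype=np.float32)  # what initializers.Zero() produces
y = x.dot(W.T) + initial_bias                 # Linear computes y = xW^T + b
assert np.array_equal(y.reshape(k, k), np.identity(k, dtype=np.float32))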
Example #2
Source File: alex.py    From chainer-compiler with MIT License
def __init__(self):
        chainer.Chain.__init__(self)
        self.dtype = np.float16
        W = initializers.HeNormal(1 / np.sqrt(2), self.dtype)
        bias = initializers.Zero(self.dtype)

        with self.init_scope():
            self.conv1 = L.Convolution2D(None, 96, 11, stride=4,
                                         initialW=W, initial_bias=bias)
            self.conv2 = L.Convolution2D(None, 256, 5, pad=2,
                                         initialW=W, initial_bias=bias)
            self.conv3 = L.Convolution2D(None, 384, 3, pad=1,
                                         initialW=W, initial_bias=bias)
            self.conv4 = L.Convolution2D(None, 384, 3, pad=1,
                                         initialW=W, initial_bias=bias)
            self.conv5 = L.Convolution2D(None, 256, 3, pad=1,
                                         initialW=W, initial_bias=bias)
            self.fc6 = L.Linear(None, 4096, initialW=W, initial_bias=bias)
            self.fc7 = L.Linear(None, 4096, initialW=W, initial_bias=bias)
            self.fc8 = L.Linear(None, 1000, initialW=W, initial_bias=bias) 
Example #3
Source File: test_link.py    From chainer with MIT License
def test_add_param(self):
        self.link.add_param('z', (2, 3))
        self.check_param_init('z', (2, 3), 'f')

        self.link.add_param('w', (2, 3), dtype='d')
        self.check_param_init('w', (2, 3), 'd')

        self.link.add_param('r')
        self.check_param_uninit('r')
        self.link.r.initialize((2, 3))
        self.check_param_init('r', (2, 3), 'f')

        self.link.add_param('s', dtype='d')
        self.check_param_uninit('s')
        self.link.s.initialize((2, 3))
        self.check_param_init('s', (2, 3), 'd')

        initializer = initializers.Zero('d')
        self.link.add_param('t', initializer=initializer)
        self.check_param_uninit('t', initializer)
        self.link.t.initialize((2, 3))
        self.check_param_init('t', (2, 3), 'd', 0) 
Example #4
Source File: network.py    From ConvLSTM with MIT License
def __init__(self, inp = 256, mid = 128, sz = 3):
        super(ConvLSTM, self).__init__(
            Wxi = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Whi = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True),
            Wxf = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Whf = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True),
            Wxc = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Whc = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True),
            Wxo = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Who = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True)
        )

        self.inp = inp
        self.mid = mid
        
        self.pc = None
        self.ph = None

        with self.init_scope():
            Wci_initializer = initializers.Zero()
            self.Wci = variable.Parameter(Wci_initializer)
            Wcf_initializer = initializers.Zero()
            self.Wcf = variable.Parameter(Wcf_initializer)
            Wco_initializer = initializers.Zero()
            self.Wco = variable.Parameter(Wco_initializer) 
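Note that Wci, Wcf, and Wco are created with only an initializer and no shape: the peephole weights stay uninitialized until their size is fixed at the first forward pass. A minimal sketch of that lazy pattern (the shape is illustrative):

import chainer
from chainer import initializers

p = chainer.Parameter(initializers.Zero())
assert p.array is None         # still uninitialized
p.initialize((128, 16, 16))    # e.g. (mid, height, width) once inputs are seen
assert (p.array == 0).all()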
Example #5
Source File: test_variable.py    From chainer with MIT License
def test_copydata_from_uninitialized_parameter(
            self, src_backend_config, dst_backend_config):
        shape = self.shape
        dtype = np.float32
        dst_arr_numpy = np.asarray(np.random.randn(*shape), dtype)
        dst_arr = dst_backend_config.get_array(dst_arr_numpy.copy())
        initializer = initializers.Zero()
        dst_var = chainer.Parameter(dst_arr)
        src_var = chainer.Parameter(initializer)
        src_var.to_device(src_backend_config.device)
        dst_arr_prev = dst_var.array

        dst_var.copydata(src_var)

        assert src_var.device == src_backend_config.device
        assert dst_var.device == dst_backend_config.device
        assert dst_var.array is dst_arr_prev
        np.testing.assert_array_equal(
            _numpy_device.send(dst_var.array),
            _numpy_device.send(src_var.array)) 
Example #6
Source File: multibox.py    From chainercv with MIT License
def __init__(
            self, n_class, aspect_ratios,
            initialW=None, initial_bias=None):
        self.n_class = n_class
        self.aspect_ratios = aspect_ratios

        super(Multibox, self).__init__()
        with self.init_scope():
            self.loc = chainer.ChainList()
            self.conf = chainer.ChainList()

        if initialW is None:
            initialW = initializers.LeCunUniform()
        if initial_bias is None:
            initial_bias = initializers.Zero()
        init = {'initialW': initialW, 'initial_bias': initial_bias}

        for ar in aspect_ratios:
            n = (len(ar) + 1) * 2
            self.loc.add_link(L.Convolution2D(n * 4, 3, pad=1, **init))
            self.conf.add_link(L.Convolution2D(
                n * self.n_class, 3, pad=1, **init)) 
Example #7
Source File: ssd_vgg16.py    From chainercv with MIT License
def __init__(self):
        init = {
            'initialW': initializers.LeCunUniform(),
            'initial_bias': initializers.Zero(),
        }
        super(VGG16Extractor512, self).__init__()
        with self.init_scope():
            self.conv8_1 = L.Convolution2D(256, 1, **init)
            self.conv8_2 = L.Convolution2D(512, 3, stride=2, pad=1, **init)

            self.conv9_1 = L.Convolution2D(128, 1, **init)
            self.conv9_2 = L.Convolution2D(256, 3, stride=2, pad=1, **init)

            self.conv10_1 = L.Convolution2D(128, 1, **init)
            self.conv10_2 = L.Convolution2D(256, 3, stride=2, pad=1, **init)

            self.conv11_1 = L.Convolution2D(128, 1, **init)
            self.conv11_2 = L.Convolution2D(256, 3, stride=2, pad=1, **init)

            self.conv12_1 = L.Convolution2D(128, 1, **init)
            self.conv12_2 = L.Convolution2D(256, 4, pad=1, **init) 
Example #8
Source File: link_batch_normalization.py    From GUINNESS with GNU General Public License v2.0
def __init__(self, size, decay=0.9, eps=2e-5, dtype=numpy.float32,
                 use_gamma=True, use_beta=True,
                 initial_gamma=None, initial_beta=None):
        super(BatchNormalization, self).__init__()
        if use_gamma:
            self.add_param('gamma', size, dtype=dtype)
            if initial_gamma is None:
                initial_gamma = initializers.One()
            initializers.init_weight(self.gamma.data, initial_gamma)
        if use_beta:
            self.add_param('beta', size, dtype=dtype)
            if initial_beta is None:
                initial_beta = initializers.Zero()
            initializers.init_weight(self.beta.data, initial_beta)
        self.add_persistent('avg_mean', numpy.zeros(size, dtype=dtype))
        self.add_persistent('avg_var', numpy.zeros(size, dtype=dtype))
        self.add_persistent('N', 0)
        self.decay = decay
        self.eps = eps 
Example #9
Source File: ssd_vgg16.py    From chainercv with MIT License
def __init__(self):
        init = {
            'initialW': initializers.LeCunUniform(),
            'initial_bias': initializers.Zero(),
        }
        super(VGG16Extractor300, self).__init__()
        with self.init_scope():
            self.conv8_1 = L.Convolution2D(256, 1, **init)
            self.conv8_2 = L.Convolution2D(512, 3, stride=2, pad=1, **init)

            self.conv9_1 = L.Convolution2D(128, 1, **init)
            self.conv9_2 = L.Convolution2D(256, 3, stride=2, pad=1, **init)

            self.conv10_1 = L.Convolution2D(128, 1, **init)
            self.conv10_2 = L.Convolution2D(256, 3, **init)

            self.conv11_1 = L.Convolution2D(128, 1, **init)
            self.conv11_2 = L.Convolution2D(256, 3, **init) 
Example #10
Source File: variable.py    From chainer with MIT License
def zerograd(self):
        super(Parameter, self).zerograd()
        if not self.is_initialized:
            dtype = getattr(self.initializer, 'dtype', None)
            self._grad_initializer = initializers.Zero(dtype) 
Example #11
Source File: affine_channel_2d.py    From chainer-mask-rcnn with MIT License
def __init__(self, channels):
        super(AffineChannel2D, self).__init__()
        with self.init_scope():
            self.W = chainer.variable.Parameter(
                initializers.One(), (channels,))
            self.b = chainer.variable.Parameter(
                initializers.Zero(), (channels,)) 
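AffineChannel2D is the fixed per-channel affine commonly used in Mask R-CNN ports to replace frozen batch normalization; with W initialized to ones and b to zeros it starts as the identity. A small numpy illustration of the per-channel broadcast, with illustrative shapes:

import numpy as np

x = np.random.randn(2, 4, 5, 5).astype(np.float32)  # (N, C, H, W)
W = np.ones(4, dtype=np.float32)                     # initializers.One()
b = np.zeros(4, dtype=np.float32)                    # initializers.Zero()
y = x * W[None, :, None, None] + b[None, :, None, None]
assert np.allclose(y, x)                             # identity at initialization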
Example #12
Source File: test_vgg16.py    From chainercv with MIT License
def setUp(self):
        self.link = VGG16(
            n_class=self.n_class, pretrained_model=None,
            initialW=Zero())
        self.link.pick = self.pick 
Example #13
Source File: rnn_cells.py    From knmt with GNU General Public License v3.0
def create_initializer(init_type, scale=None, fillvalue=None):
    if init_type == 'identity':
        return initializers.Identity() if scale is None else initializers.Identity(scale=scale)
    if init_type == 'constant':
        return initializers.Constant(fillvalue)
    if init_type == 'zero':
        return initializers.Zero()
    if init_type == 'one':
        return initializers.One()
    if init_type == 'normal':
        return initializers.Normal() if scale is None else initializers.Normal(scale)
    if init_type == 'glorotNormal':
        return initializers.GlorotNormal() if scale is None else initializers.GlorotNormal(scale)
    if init_type == 'heNormal':
        return initializers.HeNormal() if scale is None else initializers.HeNormal(scale)
    if init_type == 'orthogonal':
        return initializers.Orthogonal() if scale is None else initializers.Orthogonal(scale)
    if init_type == 'uniform':
        return initializers.Uniform() if scale is None else initializers.Uniform(scale)
    if init_type == 'leCunUniform':
        return initializers.LeCunUniform() if scale is None else initializers.LeCunUniform(scale)
    if init_type == 'glorotUniform':
        return initializers.GlorotUniform() if scale is None else initializers.GlorotUniform(scale)
    if init_type == 'heUniform':
        return initializers.HeUniform() if scale is None else initializers.HeUniform(scale)
    raise ValueError("Unknown initializer type: {0}".format(init_type)) 
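For context, this factory is typically driven by a string taken from a configuration; a short, hypothetical usage:

zero_init = create_initializer('zero')                # -> initializers.Zero()
he_init = create_initializer('heNormal', scale=2.0)   # -> initializers.HeNormal(2.0)
const_init = create_initializer('constant', fillvalue=0.1)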
Example #14
Source File: test_variable.py    From chainer with MIT License
def test_zerograd_dtype(self):
        x = chainer.Parameter(initializers.Zero(dtype=np.float16))
        with testing.assert_warns(DeprecationWarning):
            x.zerograd()
        x.initialize((3, 2))
        assert x.grad.dtype == x.data.dtype 
Example #15
Source File: test_variable.py    From chainer with MIT License
def test_initialize_dtype(self):
        initializer = initializers.Zero(np.float64)
        x = chainer.Parameter(initializer=initializer)
        x.initialize((2, 3))
        assert x.data.dtype == np.float64
        assert x.grad.dtype == np.float64 
Example #16
Source File: test_multiprocess_parallel_updater.py    From chainer with MIT License
def __init__(self, dtype=numpy.float32):
        super(SimpleNet, self).__init__()
        self.dtype = dtype
        W = initializers.HeNormal(1 / numpy.sqrt(2), self.dtype)
        bias = initializers.Zero(self.dtype)
        with self.init_scope():
            self.conv = chainer.links.Convolution2D(2, 2, 3, initialW=W,
                                                    initial_bias=bias)
            self.fc = chainer.links.Linear(18, 2, initialW=W,
                                           initial_bias=bias)
        self.train = True 
Example #17
Source File: test_init.py    From chainer with MIT License
def _generate_array(self, xp, dtype=None, device=None):
        initializer = initializers.Zero(dtype)
        return initializers.generate_array(initializer, (), xp, device=device) 
Example #18
Source File: layer_normalization.py    From knmt with GNU General Public License v3.0
def __init__(self, size=None, eps=1e-6, initial_gamma=None,
                 initial_beta=None):
        super(LayerNormalizationLink, self).__init__()
        self.add_uninitialized_param('gamma')
        self.add_uninitialized_param('beta')
        if initial_gamma is None:
            initial_gamma = initializers.One()
        self._gamma_initializer = initial_gamma
        if initial_beta is None:
            initial_beta = initializers.Zero()
        self._beta_initializer = initial_beta
        self.eps = eps

        if size is not None:
            self._initialize_params(size)