Python chainer.variable.Parameter() Examples

The following are 30 code examples of chainer.variable.Parameter(). You can go to the original project or source file by following the references above each example. You may also want to check out all available functions/classes of the module chainer.variable, or try the search function.
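For orientation, the sketch below (not taken from any of the projects listed; TinyLinear is a hypothetical name) shows the two ways Parameter is typically constructed inside a link: with only an initializer, which defers allocation until the shape is known, or with an initializer plus a shape, which allocates immediately. It assumes chainer and numpy are installed.

import numpy as np
import chainer
import chainer.functions as F
from chainer import initializers, variable


class TinyLinear(chainer.Link):
    """A hypothetical link showing deferred and immediate Parameter creation."""

    def __init__(self, out_size):
        super(TinyLinear, self).__init__()
        self.out_size = out_size
        with self.init_scope():
            # No shape given: W stays uninitialized until in_size is known.
            self.W = variable.Parameter(initializers.HeNormal())
            # Initializer (a scalar here) plus a shape: b is created right away.
            self.b = variable.Parameter(0, (out_size,))

    def forward(self, x):
        if self.W.array is None:
            # Deferred initialization now that the input width is known.
            self.W.initialize((self.out_size, x.shape[1]))
        return F.linear(x, self.W, self.b)


y = TinyLinear(3)(np.random.rand(2, 4).astype(np.float32))
print(y.shape)  # (2, 3)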
Example #1
Source File: link.py    From chainer with MIT License
def params(
            self,
            include_uninit: bool = True
    ) -> tp.Iterator['chainer.Parameter']:
        """Returns a generator of all parameters under the link hierarchy.

        Args:
            include_uninit (bool): If ``True``, it also generates uninitialized
                parameters.

        Returns:
            A generator object that generates all parameters.

        """
        d = self.__dict__  # type: tp.Dict[str, chainer.Parameter]
        for name in sorted(self._params):
            if include_uninit or d[name].data is not None:
                yield d[name] 
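A short usage sketch of this generator (not part of link.py above; assumes chainer is installed): a Linear link built with in_size=None keeps W uninitialized, and include_uninit controls whether that parameter is yielded.

import chainer.links as L

link = L.Linear(None, 5)   # W is deferred, b is created immediately
for p in link.params(include_uninit=True):
    print(p.name, p.shape if p.array is not None else None)
# W None
# b (5,)
for p in link.params(include_uninit=False):
    print(p.name)          # only 'b' is generated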
Example #2
Source File: simplified_dropconnect.py    From chainer with MIT License
def __init__(self, in_size, out_size, ratio=.5, nobias=False,
                 initialW=None, initial_bias=None):
        super(SimplifiedDropconnect, self).__init__()

        self.out_size = out_size
        self.ratio = ratio

        if initialW is None:
            initialW = initializers.HeNormal(1. / numpy.sqrt(2))

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_size is not None:
                self._initialize_params(in_size)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = initializers.Constant(0)
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_size) 
Example #3
Source File: convolution_2d.py    From chainer with MIT License
def forward(self, x):
        """Applies the convolution layer.

        Args:
            x (~chainer.Variable): Input image.

        Returns:
            ~chainer.Variable: Output of the convolution.

        """
        x = chainer.as_variable(x)
        assert x.layout == self.x_layout
        # self.W can be a Variable instead of Parameter: #8462
        # TODO(niboshi): Use Parameter.is_initialized.
        if self.W.raw_array is None:
            _, c, _, _ = memory_layouts.get_semantic_shape(
                x, assumed_layout=self.x_layout)
            self._initialize_params(c)
        return convolution_2d.convolution_2d(
            x, self.W, self.b, self.stride, self.pad, dilate=self.dilate,
            groups=self.groups, cudnn_fast=self.cudnn_fast) 
Example #4
Source File: local_convolution_2d.py    From chainer with MIT License
def __init__(self, in_channels, out_channels, in_size=None, ksize=None,
                 stride=1, nobias=False, initialW=None, initial_bias=None,
                 **kwargs):
        super(LocalConvolution2D, self).__init__()
        self.ksize = ksize
        self.stride = _pair(stride)
        self.nobias = nobias
        self.out_channels = out_channels
        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer)

            if in_channels is not None and in_size is not None:
                self._initialize_params(in_channels, _pair(in_size)) 
Example #5
Source File: link.py    From chainer with MIT License
def count_params(self) -> int:
        """Counts the total number of parameters.

        This method counts the total number of scalar values included in all
        the :class:`~chainer.Parameter`\\ s held by this link and its
        descendants.

        If the link contains uninitialized parameters, this method raises a
        warning.

        Returns:
            The total size of parameters (int)

        """

        size = 0
        for name, param in self.namedparams():
            if param.array is None:
                warnings.warn(
                    'Parameter \'{}\' has not been initialized, so the '
                    'resulting count will not include the number of parameters'
                    ' in it.'.format(name))
                continue
            size += param.size
        return size 
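A hedged usage sketch (not from link.py; assumes chainer is installed) of counting the scalars in a small, fully initialized model:

import chainer
import chainer.links as L

model = chainer.Sequential(L.Linear(4, 3), L.Linear(3, 2))
print(model.count_params())  # (4*3 + 3) + (3*2 + 2) = 23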
Example #6
Source File: link.py    From chainer with MIT License
def serialize(self, serializer: 'chainer.AbstractSerializer') -> None:
        """Serializes the link object.

        Args:
            serializer (~chainer.AbstractSerializer): Serializer object.

        """
        d = self.__dict__  # type: tp.Dict[str, chainer.Parameter]
        for name in self._params:
            param = d[name]
            data = serializer(name, param.data)  # type: types.NdArray
            if param.data is None and data is not None:
                # Initialize the parameter here
                param.initialize(data.shape)
                with chainer.using_device(param.device):
                    param.data[...] = param.device.send(data)
        for name in self._persistent:
            d[name] = serializer(name, d[name]) 
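To see serialize() fill an uninitialized parameter, the bundled npz serializers can be used for a round trip. A sketch, assuming chainer is installed and /tmp is writable:

import chainer
import chainer.links as L

src = L.Linear(3, 2)                        # W: (2, 3), b: (2,)
chainer.serializers.save_npz('/tmp/linear.npz', src)

dst = L.Linear(None, 2)                     # W is still uninitialized
chainer.serializers.load_npz('/tmp/linear.npz', dst)
print(dst.W.shape)                          # (2, 3), initialized from the file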
Example #7
Source File: link.py    From chainer with MIT License
def namedparams(
            self,
            include_uninit: bool = True
    ) -> tp.Iterator[tp.Tuple[str, 'chainer.Parameter']]:
        """Returns a generator of all (path, param) pairs under the hierarchy.

        Args:
            include_uninit (bool): If ``True``, it also generates uninitialized
                parameters.

        Returns:
            A generator object that generates all (path, parameter) pairs. The
            paths are relative from this link.

        """
        d = self.__dict__  # type: tp.Dict[str, chainer.Parameter]
        for name in sorted(self._params):
            if include_uninit or d[name].is_initialized:
                yield '/' + name, d[name] 
Example #8
Source File: group_normalization.py    From chainer with MIT License
def __init__(self, groups, size=None, eps=1e-5, initial_gamma=None,
                 initial_beta=None):
        super(GroupNormalization, self).__init__()
        if initial_gamma is None:
            initial_gamma = 1
        if initial_beta is None:
            initial_beta = 0

        highprec_dtype = chainer.get_dtype(
            None, map_mixed16=numpy.float32)

        with self.init_scope():
            self.groups = groups
            gamma_initializer = \
                initializers._get_initializer(initial_gamma)
            gamma_initializer.dtype = highprec_dtype
            beta_initializer = \
                initializers._get_initializer(initial_beta)
            beta_initializer.dtype = highprec_dtype
            self.gamma = variable.Parameter(gamma_initializer)
            self.beta = variable.Parameter(beta_initializer)
            self.eps = eps

        if size is not None:
            self._initialize_params(size) 
Example #9
Source File: MyCRFLayer.py    From CRF-Layer-on-the-Top-of-BiLSTM with MIT License
def __init__(self, n_label):
        super(My_CRF, self).__init__(n_label)
        with self.init_scope():
            '''
            [Initialization]
            '''
            # Generate random values for transition matrix.
            # The shape of transition matrix is (n_label+2, n_label+2).
            # "2" means the extra added labels, START and END. (see 3.2)
            drange = np.sqrt(6. / (np.sum((n_label + 2, n_label + 2))))
            value = drange * np.random.uniform(low=-1.0, high=1.0, size=(n_label + 2, n_label + 2))
            transitions = np.array(value, dtype=np.float32)
            self.cost = variable.Parameter(transitions)

            # The number of unique labels in training data set (e.g B-Person, I-Person, O)
            self.n_label = n_label

            # The small value will fill the expanded emission score matrix as described in 3.2
            self.small = -1000 
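As this example shows, Parameter also accepts a concrete ndarray as its first argument, in which case that array becomes the initial value. A minimal sketch (not from MyCRFLayer.py; assumes chainer and numpy are installed):

import numpy as np
from chainer import variable

w = variable.Parameter(np.arange(6, dtype=np.float32).reshape(2, 3))
print(w.shape, w.requires_grad)  # (2, 3) True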
Example #10
Source File: network.py    From ConvLSTM with MIT License
def __init__(self, inp = 256, mid = 128, sz = 3):
        super(ConvLSTM, self).__init__(
            Wxi = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Whi = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True),
            Wxf = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Whf = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True),
            Wxc = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Whc = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True),
            Wxo = L.Convolution2D(inp, mid, sz, pad = sz//2),
            Who = L.Convolution2D(mid, mid, sz, pad = sz//2, nobias = True)
        )

        self.inp = inp
        self.mid = mid
        
        self.pc = None
        self.ph = None

        with self.init_scope():
            Wci_initializer = initializers.Zero()
            self.Wci = variable.Parameter(Wci_initializer)
            Wcf_initializer = initializers.Zero()
            self.Wcf = variable.Parameter(Wcf_initializer)
            Wco_initializer = initializers.Zero()
            self.Wco = variable.Parameter(Wco_initializer) 
Example #11
Source File: scale.py    From chainer-stylegan with MIT License
def __init__(self, axis=1, W_shape=None, bias_term=False, bias_shape=None, initialW=None, initial_bias=None):
        super(Scale, self).__init__()
        self.axis = axis

        with self.init_scope():
            # Add W parameter and/or bias term.
            if W_shape is not None:
                if initialW is None:
                    initialW = 1
                W_initializer = initializers._get_initializer(initialW)
                self.W = variable.Parameter(W_initializer, W_shape)
                if bias_term:
                    self.bias = Bias(axis, W_shape, initial_bias)
            else:
                if bias_term:
                    if bias_shape is None:
                        raise ValueError(
                            'bias_shape should be given if W is not '
                            'learnt parameter and bias_term is True.')
                    # W is not a learnt parameter here, so the bias link uses
                    # the explicit bias_shape validated above.
                    self.bias = Bias(axis, bias_shape, initial_bias) 
Example #12
Source File: test_sequential.py    From chainer with MIT License
def test_serialize(self):
        l1 = links.Linear(None, 1)
        l2 = links.Linear(None, 3)
        with l2.init_scope():
            l2.x = variable.Parameter(0, 2)
        s1 = chainer.Sequential(l1, l2)
        mocks = {'0': mock.MagicMock(), '1': mock.MagicMock()}
        serializer = mock.MagicMock()
        serializer.__getitem__.side_effect = lambda k: mocks[k]
        serializer.return_value = None
        mocks['0'].return_value = None
        mocks['1'].return_value = None
        s1.serialize(serializer)

        self.assertEqual(serializer.call_count, 0)
        self.assertEqual(serializer.__getitem__.call_count, 2)
        serializer.__getitem__.assert_any_call('0')
        serializer.__getitem__.assert_any_call('1')

        mocks['0'].assert_any_call('W', None)
        mocks['0'].assert_any_call('b', l1.b.data)
        mocks['1'].assert_any_call('W', None)
        mocks['1'].assert_any_call('b', l2.b.data)
        mocks['1'].assert_any_call('x', l2.x.data) 
Example #13
Source File: matting_link.py    From portrait_matting with GNU General Public License v3.0
def __init__(self, init_lambda=100):
        super().__init__()
        with self.init_scope():
            self.lambda_ = variable.Parameter(init_lambda, shape=(1,)) 
Example #14
Source File: convolution_nd.py    From chainer with MIT License
def __init__(self, ndim, in_channels, out_channels, ksize=None, stride=1,
                 pad=0, nobias=False, initialW=None, initial_bias=None,
                 cover_all=False, dilate=1, groups=1):
        super(ConvolutionND, self).__init__()

        if ksize is None:
            out_channels, ksize, in_channels = \
                in_channels, out_channels, None

        self.out_channels = out_channels
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all
        self.dilate = conv_nd.as_tuple(dilate, ndim)
        self.groups = int(groups)

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                initial_bias = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(initial_bias, out_channels) 
Example #15
Source File: deformable_convolution_2d.py    From chainer with MIT License
def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None):
        super(DeformableConvolution2DSampler, self).__init__()

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.out_channels = out_channels
        self.initialW = initialW

        if initialW is None:
            initialW = constant.Zero()

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = initializers.Constant(0)
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer)

        if in_channels is not None:
            self._initialize_params(in_channels) 
Example #16
Source File: embed_id.py    From chainer with MIT License
def __init__(self, in_size, out_size, initialW=None, ignore_label=None):
        super(EmbedID, self).__init__()
        self.ignore_label = ignore_label

        with self.init_scope():
            if initialW is None:
                initialW = normal.Normal(1.0)
            self.W = variable.Parameter(initialW, (in_size, out_size)) 
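Since both in_size and out_size are known here, the Parameter gets its full (in_size, out_size) shape at construction time. A usage sketch (not from embed_id.py; assumes chainer and numpy are installed):

import numpy as np
import chainer.links as L

embed = L.EmbedID(10, 4)                   # 10-word vocabulary, 4-dim vectors
ids = np.array([1, 5, 5], dtype=np.int32)
print(embed(ids).shape)                    # (3, 4)
print(embed.W.shape)                       # (10, 4)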
Example #17
Source File: deconvolution_nd.py    From chainer with MIT License
def __init__(self, ndim, in_channels, out_channels, ksize=None, stride=1,
                 pad=0, nobias=False, outsize=None, initialW=None,
                 initial_bias=None, dilate=1, groups=1):
        super(DeconvolutionND, self).__init__()

        if ksize is None:
            out_channels, ksize, in_channels = \
                in_channels, out_channels, None

        self.out_channels = out_channels
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.outsize = outsize
        self.dilate = conv_nd.as_tuple(dilate, ndim)
        self.groups = int(groups)

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                initial_bias = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(initial_bias, out_channels) 
Example #18
Source File: depthwise_convolution_2d.py    From chainer with MIT License
def __init__(self, in_channels, channel_multiplier, ksize, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None):
        super(DepthwiseConvolution2D, self).__init__()
        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.channel_multiplier = channel_multiplier
        self.nobias = nobias

        if initialW is None:
            initialW = initializers.HeNormal(1. / numpy.sqrt(2))

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = initializers.Constant(0)
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer)

        if in_channels is not None:
            self._initialize_params(in_channels) 
Example #19
Source File: deconvolution_2d.py    From chainer with MIT License
def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
                 nobias=False, outsize=None, initialW=None, initial_bias=None,
                 **kwargs):
        super(Deconvolution2D, self).__init__()

        dilate, groups, = argument.parse_kwargs(
            kwargs, ('dilate', 1), ('groups', 1),
            deterministic='deterministic argument is not supported anymore. '
            'Use chainer.using_config(\'cudnn_deterministic\', value) '
            'context where value is either `True` or `False`.')

        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.dilate = _pair(dilate)
        self.outsize = (None, None) if outsize is None else outsize
        self.out_channels = out_channels
        self.groups = int(groups)

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if isinstance(initial_bias, (numpy.ndarray, cuda.ndarray)):
                    assert initial_bias.shape == (out_channels,)
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_channels) 
Example #20
Source File: adaptive_softmax.py    From models with MIT License
def __init__(self, n_units, n_vocab,
                 cutoff=[2000, 10000], reduce_k=4):
        super(AdaptiveSoftmaxOutputLayer, self).__init__()
        assert(all(c < n_vocab - 1 for c in cutoff))
        self.n_clusters = len(cutoff) + 1
        self.n_tails = self.n_clusters - 1

        cutoff.append(n_vocab)
        initializer = chainer.initializers._get_initializer(None)
        with self.init_scope():
            self.head = variable.Parameter(initializer=initializer)
            self.head.initialize((cutoff[0] + self.n_tails, n_units))

            tail_units = n_units
            for i in range(1, self.n_tails + 1):
                tail_units = tail_units // reduce_k
                n_comp_words = cutoff[i] - cutoff[i - 1]
                assert(tail_units > 0)
                assert(n_comp_words > 0)

                self.add_param('reduce{}'.format(i), initializer=initializer)
                getattr(self, 'reduce{}'.format(i)).initialize(
                    (tail_units, n_units))
                self.add_param('tail{}'.format(i), initializer=initializer)
                getattr(self, 'tail{}'.format(i)).initialize(
                    (n_comp_words, tail_units))

            cutoff = self.xp.array([0] + cutoff, dtype=np.int32)
            assert(len(cutoff) == self.n_clusters + 1)
            self.add_param('cutoff', cutoff.shape, dtype='f')
            self.cutoff.data[:] = cutoff 
Example #21
Source File: bias.py    From chainer with MIT License
def __init__(self, axis=1, shape=None):
        super(Bias, self).__init__()

        # Add b parameter if given.
        if shape is not None:
            with self.init_scope():
                self.b = variable.Parameter(0, shape)

        self.axis = axis 
Example #22
Source File: active_bn.py    From voxelnet_chainer with MIT License
def __init__(self, size, decay=0.9, eps=2e-5, dtype=numpy.float32,
                 use_gamma=True, use_beta=True,
                 initial_gamma=None, initial_beta=None):
        super(BatchNormalization, self).__init__()
        self.avg_mean = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_mean')
        self.avg_var = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_var')
        self.N = 0
        self.register_persistent('N')
        self.decay = decay
        self.eps = eps

        with self.init_scope():
            if use_gamma:
                if initial_gamma is None:
                    initial_gamma = 1
                initial_gamma = initializers._get_initializer(initial_gamma)
                initial_gamma.dtype = dtype
                self.gamma = variable.Parameter(initial_gamma, size)
            if use_beta:
                if initial_beta is None:
                    initial_beta = 0
                initial_beta = initializers._get_initializer(initial_beta)
                initial_beta.dtype = dtype
                self.beta = variable.Parameter(initial_beta, size) 
Example #23
Source File: active_batchnorm.py    From voxelnet_chainer with MIT License
def __init__(self, size, decay=0.9, eps=2e-5, dtype=numpy.float32,
                 use_gamma=True, use_beta=True,
                 initial_gamma=None, initial_beta=None):
        super(ActiveBatchNormalization, self).__init__()
        self.avg_mean = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_mean')
        self.avg_var = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_var')
        self.N = 0
        self.register_persistent('N')
        self.decay = decay
        self.eps = eps

        with self.init_scope():
            if use_gamma:
                if initial_gamma is None:
                    initial_gamma = 1
                initial_gamma = initializers._get_initializer(initial_gamma)
                initial_gamma.dtype = dtype
                self.gamma = variable.Parameter(initial_gamma, size)
            if use_beta:
                if initial_beta is None:
                    initial_beta = 0
                initial_beta = initializers._get_initializer(initial_beta)
                initial_beta.dtype = dtype
                self.beta = variable.Parameter(initial_beta, size) 
Example #24
Source File: swish.py    From chainer with MIT License
def __init__(self, beta_shape, beta_init=1.0):
        super(Swish, self).__init__()

        with self.init_scope():
            if beta_shape is not None:
                self.beta = variable.Parameter(beta_init, beta_shape)
            else:
                beta_init = initializers.Constant(beta_init)
                self.beta = variable.Parameter(beta_init) 
Example #25
Source File: layer_normalization.py    From chainer with MIT License
def __init__(self, size=None, eps=1e-6, initial_gamma=None,
                 initial_beta=None):
        super(LayerNormalization, self).__init__()
        if initial_gamma is None:
            initial_gamma = 1
        if initial_beta is None:
            initial_beta = 0

        with self.init_scope():
            self.gamma = variable.Parameter(initial_gamma)
            self.beta = variable.Parameter(initial_beta)
            self.eps = eps

        if size is not None:
            self._initialize_params(size) 
Example #26
Source File: deterministic_embed_id.py    From espnet with Apache License 2.0
def __init__(self, in_size, out_size, initialW=None, ignore_label=None):
        super(EmbedID, self).__init__()
        self.ignore_label = ignore_label

        with self.init_scope():
            if initialW is None:
                initialW = normal.Normal(1.0)
            self.W = variable.Parameter(initialW, (in_size, out_size)) 
Example #27
Source File: normalize.py    From chainercv with MIT License
def __init__(self, n_channel, initial=0, eps=1e-5):
        super(Normalize, self).__init__()
        self.eps = eps
        with self.init_scope():
            initializer = initializers._get_initializer(initial)
            self.scale = variable.Parameter(initializer)
            self.scale.initialize((n_channel,)) 
Example #28
Source File: prelu.py    From chainer with MIT License
def __init__(self, shape=(), init=0.25):
        super(PReLU, self).__init__()
        with self.init_scope():
            self.W = variable.Parameter(init, shape) 
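Here the Parameter is built from a plain scalar init value and a shape; the default empty shape gives one slope shared across all elements. A small sketch (not from prelu.py; assumes chainer and numpy are installed):

import numpy as np
import chainer.links as L

prelu = L.PReLU(shape=(), init=0.25)
x = np.array([-2.0, 3.0], dtype=np.float32)
print(prelu(x).array)  # [-0.5  3. ]  -- negative inputs are scaled by 0.25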
Example #29
Source File: bias.py    From chainer-stylegan with MIT License
def __init__(self, axis=1, shape=None, initial_bias=None):
        super(Bias, self).__init__()

        # Add b parameter if given.
        if shape is not None:
            with self.init_scope():
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, shape)

        self.axis = axis 
Example #30
Source File: batch_normalization.py    From chainer with MIT License
def __init__(self, size, comm, decay=0.9, eps=2e-5, dtype=None,
                 use_gamma=True, use_beta=True,
                 initial_gamma=None, initial_beta=None,
                 communication_backend='auto'):
        chainer.utils.experimental(
            'chainermn.links.MultiNodeBatchNormalization')

        super(MultiNodeBatchNormalization, self).__init__()
        self._highprec_dtype = chainer.get_dtype(
            dtype, map_mixed16=numpy.float32)
        self.comm = comm
        self.avg_mean = numpy.zeros(size, dtype=self._highprec_dtype)
        self.register_persistent('avg_mean')
        self.avg_var = numpy.zeros(size, dtype=self._highprec_dtype)
        self.register_persistent('avg_var')
        self.N = 0
        self.register_persistent('N')
        self.decay = decay
        self.eps = eps

        self._communication_backend = \
            chainermn_batch_normalization.get_communication_backend(
                comm, communication_backend)

        with self.init_scope():
            if use_gamma:
                if initial_gamma is None:
                    initial_gamma = 1
                initial_gamma = initializers._get_initializer(initial_gamma)
                initial_gamma.dtype = self._highprec_dtype
                self.gamma = variable.Parameter(initial_gamma, size)
            if use_beta:
                if initial_beta is None:
                    initial_beta = 0
                initial_beta = initializers._get_initializer(initial_beta)
                initial_beta.dtype = self._highprec_dtype
                self.beta = variable.Parameter(initial_beta, size)