Python chainer.link.Link() Examples

The following are 16 code examples of chainer.link.Link(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module chainer.link, or try the search function.
Example #1
Source File: optimizer.py    From chainer with MIT License
def use_cleargrads(self, use=True):
        """Enables or disables use of :func:`~chainer.Link.cleargrads` in `update`.

        Args:
            use (bool): If ``True``, this function enables use of
                ``cleargrads``. If ``False``, it disables use of
                ``cleargrads`` (``zerograds`` is used instead).

        .. deprecated:: v2.0
           Note that :meth:`update` calls :meth:`~Link.cleargrads` by default.
           :meth:`~Link.cleargrads` is more efficient than
           :meth:`~Link.zerograds`, so one does not have to call
           :meth:`use_cleargrads`. This method remains for backward
           compatibility.

        """
        warnings.warn(
            'GradientMethod.use_cleargrads is deprecated.',
            DeprecationWarning)

        self._use_cleargrads = use 
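Because use_cleargrads only warns and flips a flag, the deprecation is easy to observe. A minimal sketch, assuming a standard GradientMethod such as SGD; the Linear(3, 2) link is an arbitrary stand-in:

import warnings

import chainer

optimizer = chainer.optimizers.SGD()
optimizer.setup(chainer.links.Linear(3, 2))
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    optimizer.use_cleargrads(False)  # requests the zerograds fallback
assert any(issubclass(w.category, DeprecationWarning) for w in caught)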
Example #2
Source File: create_chainer_model.py    From chainer-dfi with MIT License
def copy_model(src, dst):
    assert isinstance(src, link.Chain)
    assert isinstance(dst, link.Chain)
    for child in src.children():
        if child.name not in dst.__dict__: continue
        dst_child = dst[child.name]
        if type(child) != type(dst_child): continue
        if isinstance(child, link.Chain):
            copy_model(child, dst_child)
        if isinstance(child, link.Link):
            match = True
            for a, b in zip(child.namedparams(), dst_child.namedparams()):
                if a[0] != b[0]:
                    match = False
                    break
                if a[1].data.shape != b[1].data.shape:
                    match = False
                    break
            if not match:
                print('Ignore %s because of parameter mismatch' % child.name)
                continue
            for a, b in zip(child.namedparams(), dst_child.namedparams()):
                b[1].data = a[1].data
            print('Copy %s' % child.name)
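A minimal sketch of driving copy_model; the Small chain below is a hypothetical stand-in, and any two chainer.Chain objects with matching child names and parameter shapes would behave the same way:

import chainer
import chainer.links as L

class Small(chainer.Chain):
    def __init__(self):
        super(Small, self).__init__()
        with self.init_scope():
            self.fc = L.Linear(4, 3)

src, dst = Small(), Small()
copy_model(src, dst)  # prints 'Copy fc'; dst.fc now holds src's arrays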
Example #3
Source File: portrait_vis_evaluator.py    From portrait_matting with GNU General Public License v3.0
def __init__(
            self, iterator, target, device=None,
            converter=convert.concat_examples, label_names=None,
            filename='segmentation_iter={iteration}_idx={index}.jpg',
            mode='seg', n_processes=None):

        if isinstance(iterator, iterator_module.Iterator):
            iterator = {'main': iterator}
        self.iterators = iterator

        if isinstance(target, link.Link):
            target = {'main': target}
        self.targets = target

        self.device = device
        self.converter = converter
        self.label_names = label_names
        self.filename = filename
        self.mode = mode
        self.n_processes = n_processes or multiprocessing.cpu_count() 
Example #4
Source File: create_chainer_model.py    From chainer-fast-neuralstyle with MIT License
def copy_model(src, dst):
    assert isinstance(src, link.Chain)
    assert isinstance(dst, link.Chain)
    for child in src.children():
        if child.name not in dst.__dict__: continue
        dst_child = dst[child.name]
        if type(child) != type(dst_child): continue
        if isinstance(child, link.Chain):
            copy_model(child, dst_child)
        if isinstance(child, link.Link):
            match = True
            for a, b in zip(child.namedparams(), dst_child.namedparams()):
                if a[0] != b[0]:
                    match = False
                    break
                if a[1].data.shape != b[1].data.shape:
                    match = False
                    break
            if not match:
                print('Ignore %s because of parameter mismatch' % child.name)
                continue
            for a, b in zip(child.namedparams(), dst_child.namedparams()):
                b[1].data = a[1].data
            print('Copy %s' % child.name) 
Example #5
Source File: utils.py    From Semantic-Segmentation-using-Adversarial-Networks with MIT License
def copy_chainermodel(src, dst):
    from chainer import link
    assert isinstance(src, link.Chain)
    assert isinstance(dst, link.Chain)
    print('Copying layers %s -> %s:' %
          (src.__class__.__name__, dst.__class__.__name__))
    for child in src.children():
        if child.name not in dst.__dict__:
            continue
        dst_child = dst[child.name]
        if type(child) != type(dst_child):
            continue
        if isinstance(child, link.Chain):
            copy_chainermodel(child, dst_child)
        if isinstance(child, link.Link):
            match = True
            for a, b in zip(child.namedparams(), dst_child.namedparams()):
                if a[0] != b[0]:
                    match = False
                    break
                if a[1].data.shape != b[1].data.shape:
                    match = False
                    break
            if not match:
                print('Ignore %s because of parameter mismatch.' % child.name)
                continue
            for a, b in zip(child.namedparams(), dst_child.namedparams()):
                b[1].data = a[1].data
            print(' layer: %s -> %s' % (child.name, dst_child.name))



# -----------------------------------------------------------------------------
# Data Util
# ----------------------------------------------------------------------------- 
Example #6
Source File: extensions.py    From Semantic-Segmentation-using-Adversarial-Networks with MIT License
def __init__(self, iterator, updater, converter=convert.concat_examples,
                 device=None, eval_hook=None):
        if isinstance(iterator, iterator_module.Iterator):
            iterator = {'main': iterator}
        self._iterators = iterator

        if isinstance(updater.model, link.Link):
            self._targets = {'main': updater.model}
        else:
            self._targets = updater.model

        self.updater = updater
        self.converter = converter
        self.device = device
        self.eval_hook = eval_hook 
Example #7
Source File: optimizer.py    From chainer with MIT License
def setup(self, link):
        """Sets a target link and initializes the optimizer states.

        The given link is set to the :attr:`target` attribute. This method
        also prepares the optimizer state dictionaries corresponding to all
        parameters in the link hierarchy; any existing states are discarded.

        Args:
            link (~chainer.Link): Target link object.

        Returns:
            The optimizer instance.

        .. note::
           As of v4.0.0, this function returns the optimizer instance itself
           so that you can instantiate and setup the optimizer in one line,
           e.g., ``optimizer = SomeOptimizer().setup(link)``.

        """
        if not isinstance(link, link_module.Link):
            raise TypeError('optimization target must be a link')
        self.target = link
        self.t = 0
        self.epoch = 0

        self._hookable = _OptimizerHookable(self)
        return self 
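The one-line pattern from the note above, as a minimal sketch; MomentumSGD and Linear are arbitrary choices:

import chainer
import chainer.links as L

model = L.Linear(10, 2)
optimizer = chainer.optimizers.MomentumSGD(lr=0.01).setup(model)
assert optimizer.target is model and optimizer.t == 0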
Example #8
Source File: optimizer.py    From chainer with MIT License
def update(self, lossfun=None, *args, **kwds):
        """Updates the parameters.

        This method updates the parameters of the target link. Its behavior
        differs depending on whether ``lossfun`` is given.

        If ``lossfun`` is given, this method typically clears the gradients,
        calls the loss function with given extra arguments, and calls the
        :meth:`~chainer.Variable.backward` method of its output to compute the
        gradients. The actual implementation might call ``lossfun`` more than
        once.

        If ``lossfun`` is not given, then this method assumes that the
        gradients of all parameters are already computed. An implementation
        that requires multiple gradient computations might raise an error in
        this case.

        In both cases, this method invokes the update procedure for all
        parameters.

        Args:
            lossfun (callable):
                Loss function.
                You can specify one of loss functions from
                :doc:`built-in loss functions </reference/functions>`, or
                your own loss function.
                It should not be a
                :doc:`loss function with parameters </reference/links>`
                (i.e., a :class:`~chainer.Link` instance).
                The function must accept arbitrary arguments
                and return one :class:`~chainer.Variable` object that
                represents the loss (or objective) value.
                The returned value must be a Variable derived from the input
                Variable object.
                ``lossfun`` can be omitted for methods that need only a single
                gradient computation; in this case, this method assumes that
                the gradient arrays have already been computed.
            args, kwds: Arguments for the loss function.

        """
        raise NotImplementedError 
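This method is abstract here, but the two documented calling conventions can be demonstrated with a concrete GradientMethod. A minimal sketch; the Linear model and the random batch are arbitrary illustrations:

import numpy as np

import chainer
import chainer.functions as F
import chainer.links as L

model = L.Linear(4, 3)
optimizer = chainer.optimizers.SGD().setup(model)
x = np.random.rand(2, 4).astype(np.float32)
t = np.asarray([0, 1], dtype=np.int32)

# With lossfun: update() clears the gradients, computes the loss, and
# backpropagates internally.
optimizer.update(lambda: F.softmax_cross_entropy(model(x), t))

# Without lossfun: the gradients must already be computed.
model.cleargrads()
loss = F.softmax_cross_entropy(model(x), t)
loss.backward()
optimizer.update()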
Example #9
Source File: sequential.py    From chainer with MIT License
def __delitem__(self, i):
        layer = self._layers.pop(i)
        if isinstance(layer, _link.Link):
            for i, link in enumerate(self._children):
                if link.name == layer.name:
                    del self._children[i]
                    break
            for j, layer in enumerate(self._children[i:]):
                layer.name = str(i + j) 
Example #10
Source File: sequential.py    From chainer with MIT License
def insert(self, i, layer):
        n = len(self._layers)
        if not (-n <= i < (n + 1)):
            raise IndexError(
                'Index out of range: {}'.format(i))
        if i < 0:
            i += n
        if not callable(layer):
            raise ValueError(
                'All elements of the argument should be callable. But '
                'given {} is not callable.'.format(layer))

        self._layers.insert(i, layer)
        if isinstance(layer, _link.Link):
            if i == 0:
                self._children.insert(0, layer)
            else:
                if i < 0:
                    i = len(self._layers) + i
                last_link_pos = 0
                for j in range(i - 1, -1, -1):
                    # The last link before the given position
                    if isinstance(self._layers[j], _link.Link):
                        last_link_pos = j
                self._children.insert(last_link_pos + 1, layer)
            for i, layer in enumerate(self._children):
                layer.name = str(i) 
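A minimal sketch of insert (and of the __delitem__ shown in Example #9), assuming a Chainer version that exposes chainer.Sequential (v5 or later):

import chainer.functions as F
import chainer.links as L
from chainer import Sequential

seq = Sequential(L.Linear(4, 4), F.relu)
seq.insert(1, L.Linear(4, 4))  # layers are now Linear, Linear, relu
del seq[0]                     # remaining child links are renamed '0', '1', ...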
Example #11
Source File: sequential.py    From chainer with MIT License
def count_by_layer_type(self, type_name):
        """Count the number of layers by layer type.

        This method counts the number of layers which have the name given by
        the argument ``type_name``. For example, if you want to know the number
        of :class:`~chainer.links.Linear` layers included in this model,
        ``type_name`` should be ``Linear``. If you want to know the number of
        :class:`~chainer.Function` classes or user-defined functions which have a
        specific name, ``type_name`` should be the function name, e.g.,
        ``relu`` or ``reshape``, etc.

        Args:
            type_name (str): The class or function name of a layer you want to
                enumerate.

        """

        num = 0
        for layer in self._layers:
            if isinstance(layer, _link.Link):
                if layer.__class__.__name__ == type_name:
                    num += 1
            else:
                if layer.__name__ == type_name:
                    num += 1
        return num 
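A minimal sketch of counting by type, again assuming chainer.Sequential; note that non-Link callables such as F.relu are matched by their __name__:

import chainer.functions as F
import chainer.links as L
from chainer import Sequential

seq = Sequential(L.Linear(8, 8), F.relu, L.Linear(8, 2))
assert seq.count_by_layer_type('Linear') == 2
assert seq.count_by_layer_type('relu') == 1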
Example #12
Source File: sequential.py    From chainer with MIT License
def copy(self, mode='share'):
        ret = Sequential()
        for layer in self:
            if isinstance(layer, _link.Link):
                ret.append(layer.copy(mode))
            else:
                ret.append(copy.copy(layer))
        return ret 
Example #13
Source File: sequential.py    From chainer with MIT License
def copyparams(self, link, copy_persistent=True):
        if not isinstance(link, Sequential):
            raise ValueError('Objects other than Sequential object cannot be '
                             'copied to a Sequential object.')
        for idx, child in enumerate(self):
            if isinstance(child, _link.Link):
                child.copyparams(link[idx], copy_persistent) 
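A minimal sketch: parameters are copied child by child, so the two sequences must line up index-wise:

import chainer.links as L
from chainer import Sequential

seq_a = Sequential(L.Linear(3, 3))
seq_b = Sequential(L.Linear(3, 3))
seq_a.copyparams(seq_b)  # seq_a's Linear now holds seq_b's parameter values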
Example #14
Source File: evaluator.py    From chainer with MIT License
def __init__(self, iterator, target, converter=convert.concat_examples,
                 device=None, eval_hook=None, eval_func=None, **kwargs):
        progress_bar, = argument.parse_kwargs(kwargs, ('progress_bar', False))

        if device is not None:
            device = backend.get_device(device)

        if isinstance(iterator, iterator_module.Iterator):
            iterator = {'main': iterator}
        self._iterators = iterator

        if isinstance(target, link.Link):
            target = {'main': target}
        self._targets = target

        self.converter = converter
        self.device = device
        self.eval_hook = eval_hook
        self.eval_func = eval_func

        self._progress_bar = progress_bar

        for key, iter in six.iteritems(iterator):
            if (isinstance(iter, (iterators.SerialIterator,
                                  iterators.MultiprocessIterator,
                                  iterators.MultithreadIterator)) and
                    getattr(iter, 'repeat', False)):
                msg = ('The `repeat` property of the iterator {} '
                       'is set to `True`. Typically, the evaluator sweeps '
                       'over iterators until they stop, '
                       'but since the property is `True`, this iterator '
                       'might never stop and evaluation could go into '
                       'an infinite loop. '
                       'We recommend checking the configuration '
                       'of iterators.'.format(key))
                warnings.warn(msg) 
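The usual way to avoid this warning is to construct evaluation iterators with repeat=False. A minimal sketch; the toy dataset is an arbitrary stand-in:

from chainer import iterators

dataset = [(i, i % 2) for i in range(100)]  # toy (input, label) pairs
it = iterators.SerialIterator(dataset, batch_size=10,
                              repeat=False, shuffle=False)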
Example #15
Source File: MyEvaluator.py    From HFT-CNN with MIT License
def __init__(self, iterator, target, class_dim, converter=convert.concat_examples,  
                 device=None, eval_hook=None, eval_func=None):
        if isinstance(iterator, iterator_module.Iterator):
            iterator = {'main': iterator}
        self._iterators = iterator

        if isinstance(target, link.Link):
            target = {'main': target}
        self._targets = target

        self.converter = converter
        self.device = device
        self.eval_hook = eval_hook
        self.eval_func = eval_func
        self.class_dim = class_dim 
Example #16
Source File: n_step_rnn.py    From chainer with MIT License
def __init__(self, n_layers, in_size, out_size, dropout,
                 *, initialW=None, initial_bias=None, **kwargs):
        if kwargs:
            argument.check_unexpected_kwargs(
                kwargs,
                use_cudnn='use_cudnn argument is not supported anymore. '
                'Use chainer.using_config',
                use_bi_direction='use_bi_direction is not supported anymore',
                activation='activation is not supported anymore')
            argument.assert_kwargs_empty(kwargs)

        weights = []
        if self.use_bi_direction:
            direction = 2
        else:
            direction = 1

        W_initializer = initializers._get_initializer(initialW)
        if initial_bias is None:
            initial_bias = 0
        bias_initializer = initializers._get_initializer(initial_bias)

        for i in six.moves.range(n_layers):
            for di in six.moves.range(direction):
                weight = link.Link()
                with weight.init_scope():
                    for j in six.moves.range(self.n_weights):
                        if i == 0 and j < self.n_weights // 2:
                            w_in = in_size
                        elif i > 0 and j < self.n_weights // 2:
                            w_in = out_size * direction
                        else:
                            w_in = out_size
                        w = variable.Parameter(W_initializer, (out_size, w_in))
                        b = variable.Parameter(bias_initializer, out_size)
                        setattr(weight, 'w%d' % j, w)
                        setattr(weight, 'b%d' % j, b)
                weights.append(weight)

        super(NStepRNNBase, self).__init__(*weights)

        self.ws = [[getattr(layer, 'w%d' % i)
                    for i in six.moves.range(self.n_weights)]
                   for layer in self]
        self.bs = [[getattr(layer, 'b%d' % i)
                    for i in six.moves.range(self.n_weights)]
                   for layer in self]

        self.n_layers = n_layers
        self.dropout = dropout
        self.out_size = out_size
        self.direction = direction
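The per-layer weight Links built above can be inspected through a concrete subclass; a minimal sketch, assuming chainer.links.NStepLSTM (for which n_weights is 8):

import chainer.links as L

rnn = L.NStepLSTM(n_layers=2, in_size=10, out_size=8, dropout=0.0)
assert len(rnn.ws) == 2              # one weight Link per layer (uni-directional)
assert len(rnn.ws[0]) == 8           # n_weights matrices per layer
assert rnn.ws[1][0].shape == (8, 8)  # upper layers take out_size-dim inputs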