Python chainer.Function() Examples
The following are 30 code examples of chainer.Function().
Each example lists the source file and the open-source project it was taken from.
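Before the individual examples, here is a minimal self-contained sketch, not taken from any of the projects below, of how an old-style chainer.Function is typically subclassed and applied. The class name SquareFunction and all values are illustrative only.

import numpy as np
import chainer

class SquareFunction(chainer.Function):
    """Elementwise square, y = x ** 2 (illustrative)."""
    label = 'square'

    def forward(self, inputs):
        x, = inputs
        return x * x,  # the return value must be a tuple

    def backward(self, inputs, grad_outputs):
        x, = inputs
        gy, = grad_outputs
        return 2 * x * gy,  # d(x**2)/dx = 2x

x = chainer.Variable(np.array([1., 2., 3.], dtype=np.float32))
y = SquareFunction()(x)
y.grad = np.ones_like(y.data)  # seed the output gradient
y.backward()
print(x.grad)  # [2. 4. 6.]

Note that both forward and backward return one-element tuples; several docstrings below call this requirement out explicitly.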
Example #1
Source File: test_variable.py From chainer with MIT License
def check_dtype_mismatch(self, x_data, retain):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            if not retain:
                self.retain_inputs(())
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return xp.array([1], np.int32),

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
Example #2
Source File: function.py From chainer with MIT License
def forward_cpu(self, inputs):
    """Applies forward propagation to input arrays on CPU.

    Args:
        inputs: Tuple of :class:`numpy.ndarray` object(s).

    Returns:
        tuple: Tuple of :class:`numpy.ndarray` object(s).

    .. warning::

        Implementations of :class:`Function` must take care that the
        return value is a tuple even if the function returns only one
        array.

    """
    raise NotImplementedError()
Example #3
Source File: function.py From chainer with MIT License
def forward(self, inputs):
    """Applies forward propagation to input arrays.

    It delegates the procedure to :meth:`forward_cpu` or
    :meth:`forward_gpu` by default. Which it selects is determined by
    the type of the input arrays. Implementations of :class:`Function`
    must implement either the CPU/GPU methods or this method.

    Args:
        inputs: Tuple of input array(s).

    Returns:
        Tuple of output array(s).

    .. warning::

        Implementations of :class:`Function` must take care that the
        return value is a tuple even if the function returns only one
        array.

    """
    if any(isinstance(x, cuda.ndarray) for x in inputs):
        return self.forward_gpu(inputs)
    else:
        return self.forward_cpu(inputs)
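In practice, this dispatch means a Function usually implements forward_cpu and forward_gpu rather than forward itself. A minimal sketch, assuming a made-up ExpFunction and a working CuPy installation for the GPU path:

import numpy as np
import chainer
from chainer.backends import cuda

class ExpFunction(chainer.Function):
    """y = exp(x) with separate CPU/GPU kernels (illustrative)."""

    def forward_cpu(self, inputs):
        x, = inputs
        return np.exp(x),

    def forward_gpu(self, inputs):
        x, = inputs
        return cuda.cupy.exp(x),

    def backward(self, inputs, grad_outputs):
        # Shared backward: inputs may be NumPy or CuPy arrays,
        # so pick the matching array module.
        x, = inputs
        gy, = grad_outputs
        xp = chainer.backend.get_array_module(x)
        return xp.exp(x) * gy,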
Example #4
Source File: function.py From chainer with MIT License
def backward_cpu(self, inputs, grad_outputs):
    """Applies backprop to output gradient arrays on CPU.

    Args:
        inputs: Tuple of input :class:`numpy.ndarray` object(s).
        grad_outputs: Tuple of output gradient :class:`numpy.ndarray`
            object(s).

    Returns:
        tuple: Tuple of input gradient :class:`numpy.ndarray` object(s).
        Some or all of them can be ``None``, if the function is not
        differentiable on the corresponding inputs.

    .. warning::

        Implementations of :class:`Function` must take care that the
        return value is a tuple even if the function returns only one
        array.

    """
    return tuple(None for _ in inputs)
Example #5
Source File: function.py From chainer with MIT License
def backward_gpu(self, inputs, grad_outputs):
    """Applies backprop to output gradient arrays on GPU.

    Args:
        inputs: Tuple of input :class:`cupy.ndarray` object(s).
        grad_outputs: Tuple of output gradient :class:`cupy.ndarray`
            object(s).

    Returns:
        tuple: Tuple of input gradient :class:`cupy.ndarray` object(s).
        Some or all of them can be ``None``, if the function is not
        differentiable on the corresponding inputs.

    .. warning::

        Implementations of :class:`Function` must take care that the
        return value is a tuple even if the function returns only one
        array.

    """
    return tuple(None for _ in inputs)
Example #6
Source File: function_node.py From chainer with MIT License
def check_layout_forward(self, inputs):
    if self.is_elementwise:
        if not all([x.layout == inputs[0].layout for x in inputs]):
            raise RuntimeError(
                'Inputs with mixed memory layouts were given to '
                'an elementwise function.\n'
                'Function: {}\n'
                'Input layouts: {}\n'.format(
                    self.label,
                    ', '.join(str(x.layout) for x in inputs),
                ))
    else:
        if not all([x.layout is None for x in inputs]):
            raise RuntimeError(
                'Inputs with non-standard layouts were given to '
                'a function without explicit `check_layout_forward` '
                'implementation.\n'
                'Function: {}\n'
                'Input layouts: {}\n'.format(
                    self.label,
                    ', '.join(str(x.layout) for x in inputs),
                ))
Example #7
Source File: test_gradient_check.py From chainer with MIT License
def check_negative(self, xp, func_name, input, eps, nout):
    # Should be differentiable
    func = getattr(self, '_func_{}'.format(func_name))
    grad_outputs = [
        xp.random.uniform(-1, 1, input.shape).astype(input.dtype)
        for _ in range(nout)]

    def f():
        return func(input) * nout

    try:
        gradient_check.numerical_grad(
            f, (input,), grad_outputs, eps=eps,
            detect_nondifferentiable=True)
    except gradient_check.NondifferentiableError as e:
        raise AssertionError(
            'Function `{}` is expected to be differentiable, '
            'but determined to be non-differentiable.\n\n'
            'eps: {}\n'
            'input: {}\n'
            'xp: {}\n\n'
            '{}: {}'.format(
                func_name, eps, input, xp.__name__,
                e.__class__.__name__, e))
Example #8
Source File: test_gradient_check.py From chainer with MIT License
def check_positive(self, xp, func_name, input, eps, nout):
    # Should be non-differentiable
    func = getattr(self, '_func_{}'.format(func_name))
    grad_outputs = [
        xp.random.uniform(-1, 1, input.shape).astype(input.dtype)
        for _ in range(nout)]

    def f():
        return func(input) * nout

    try:
        gradient_check.numerical_grad(
            f, (input,), grad_outputs, eps=eps,
            detect_nondifferentiable=True)
    except gradient_check.NondifferentiableError:
        pass
    else:
        raise AssertionError(
            'Function `{}` is expected to be non-differentiable, '
            'but determined to be differentiable.\n\n'
            'eps: {}\n'
            'input: {}\n'
            'xp: {}\n'
            ''.format(
                func_name, eps, input, xp.__name__))
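The two tests above drive gradient_check.numerical_grad with detect_nondifferentiable enabled. For reference, a hedged sketch of calling numerical_grad directly on f(x) = x ** 2 (values illustrative): the callable takes no arguments and must recompute the outputs from the input arrays, which numerical_grad perturbs in place.

import numpy as np
from chainer import gradient_check

x = np.array([1., 2.], dtype=np.float32)
gy = np.ones_like(x)

# The lambda re-reads `x` on every call, so it sees the perturbed values.
gx, = gradient_check.numerical_grad(lambda: (x * x,), (x,), (gy,))
print(gx)  # approximately [2. 4.]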
Example #9
Source File: test_variable.py From chainer with MIT License
def check_traceback(self, x_data):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return xp.array([1, 2], np.float32),

    x = chainer.Variable(x_data)
    line = inspect.currentframe().f_lineno + 1
    y = DummyFunction()(x)  # `line` is THIS line
    try:
        y.backward()
        self.fail()
    except ValueError as e:
        assert 'Stacktrace' in str(e)
        assert 'line %d' % line in str(e)
Example #10
Source File: test_variable.py From chainer with MIT License
def check_type_mismatch(self, x_data, retain):
    xp = backend.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            if not retain:
                self.retain_inputs(())
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return [1]

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
Example #11
Source File: function_node.py From chainer with MIT License
def retain_inputs(self, indexes):
    """Lets specified input variable nodes keep data arrays.

    By calling this method from :meth:`forward`, the function node can
    specify which inputs are required for backprop. The input variables
    with retained arrays can then be obtained by calling
    :meth:`get_retained_inputs` from inside :meth:`backward`.

    Unlike :class:`~chainer.Function`, the function node **DOES NOT**
    keep input arrays by default. If you want to keep some or all input
    arrays, do not forget to call this method.

    Note that **this method must not be called from the outside of**
    :meth:`forward`.

    Args:
        indexes (iterable of int): Indexes of input variables that the
            function will require for backprop.

    """
    self._input_indexes_to_retain = indexes
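A sketch of the pattern this docstring describes, with a made-up MulNode: both inputs are needed for backprop, so forward retains them and backward reads them back as variables.

import numpy as np
import chainer

class MulNode(chainer.FunctionNode):
    """y = a * b as a new-style FunctionNode (illustrative)."""

    def forward(self, inputs):
        a, b = inputs
        self.retain_inputs((0, 1))  # keep both arrays for backprop
        return a * b,

    def backward(self, target_input_indexes, grad_outputs):
        a, b = self.get_retained_inputs()  # returned as Variables
        gy, = grad_outputs
        return gy * b, gy * a

a = chainer.Variable(np.array([1., 2.], dtype=np.float32))
b = chainer.Variable(np.array([3., 4.], dtype=np.float32))
y, = MulNode().apply((a, b))
y.grad = np.ones_like(y.data)
y.backward()
print(a.grad)  # [3. 4.]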
Example #12
Source File: function_node.py From chainer with MIT License
def add_hook(self, hook, name=None):
    """Registers a function hook.

    Args:
        hook (~chainer.FunctionHook): Function hook to be registered.
        name (str): Name of the function hook. The name must be unique
            among function hooks registered to this function. If
            ``None``, the default name of the function hook is used.

    """
    if not isinstance(hook, function_hook.FunctionHook):
        raise TypeError('Hook must be of type FunctionHook')
    if name is None:
        name = hook.name
    hooks = self.local_function_hooks
    if name in hooks:
        raise KeyError('Hook %s already exists' % name)
    hooks[name] = hook
    hook.added(self)
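For context, a hedged sketch of defining a hook (the PrintHook class is made up). Used as a context manager, a FunctionHook applies to every function executed in the block; add_hook above instead registers it to a single function.

import numpy as np
import chainer
import chainer.functions as F

class PrintHook(chainer.FunctionHook):
    """Logs the label of every function that runs (illustrative)."""
    name = 'PrintHook'

    def forward_postprocess(self, function, in_data):
        print('forward:', function.label)

with PrintHook():
    x = chainer.Variable(np.array([1.], dtype=np.float32))
    y = F.exp(x)  # the hook reports each function executed here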
Example #13
Source File: variable.py From chainer with MIT License
def creator(self):
    """Function implementation that created this variable.

    When this variable has been created by an old-style function (i.e.,
    it is implemented as a subclass of :class:`Function`), this property
    returns that :class:`Function` object.

    When this variable has been created by a new-style function (i.e.,
    it is implemented as a subclass of :class:`FunctionNode`), this
    property returns that node object.

    """
    if self._has_chainerx_array:
        raise RuntimeError(
            'A variable of ChainerX does not provide a creator.')
    return self._node.creator
Example #14
Source File: variable.py From chainer with MIT License
def set_creator_node(self, fnode):
    """Notifies the variable that the given node is its creator.

    Args:
        fnode (FunctionNode): Function node that has this variable as an
            output.

    """
    if self._has_chainerx_array:
        raise RuntimeError(
            'A variable of ChainerX does not provide a creator node.')
    self._node.set_creator_node(fnode)
Example #15
Source File: variable.py From chainer with MIT License
def creator_node(self):
    """Function node that has this variable as an output.

    See :class:`~chainer.FunctionNode` for the definition of a function
    node.

    """
    return self._creator_node
Example #16
Source File: variable.py From chainer with MIT License
def creator_node(self, func):
    if isinstance(func, chainer.Function):
        func = func.node
    self._creator_node = func
    if func is not None:
        self._rank = func.rank + 1
Example #17
Source File: variable.py From chainer with MIT License
def set_creator(self, creator):
    """Sets a :class:`~chainer.Function` object that created this node.

    This method is equivalent to ``self.creator = creator``. A
    :class:`~chainer.FunctionNode` object can also be passed.

    Args:
        creator (Function or FunctionNode): Function that has created
            this variable.

    """
    self.creator = creator
Example #18
Source File: function_node.py From chainer with MIT License
def local_function_hooks(self):
    """Ordered dictionary of registered function hooks.

    Contrary to ``chainer.thread_local.function_hooks``, which registers
    its elements to all functions, function hooks in this property are
    specific to this function.

    """
    if self._local_function_hooks is None:
        self._local_function_hooks = collections.OrderedDict()
    return self._local_function_hooks
Example #19
Source File: function.py From chainer with MIT License
def add_hook(self, hook, name=None):
    """Registers a function hook.

    See :meth:`FunctionNode.add_hook` for details.

    Args:
        hook (~chainer.FunctionHook): Function hook to be registered.
        name (str): Name of the function hook. The name must be unique
            among function hooks registered to the function. If
            ``None``, the default name of the function hook is used.

    """
    self.node.add_hook(hook, name)
Example #20
Source File: variable.py From chainer with MIT License
def set_creator_node(self, creator_node):
    """Sets a :class:`~chainer.FunctionNode` object that created this
    node.

    This method is equivalent to ``self.creator_node = creator_node``. A
    :class:`~chainer.Function` object can also be passed, in which case
    the :attr:`Function.node <chainer.Function.node>` attribute is used.

    Args:
        creator_node (FunctionNode or Function): Function node that has
            this variable as an output.

    """
    self.creator_node = creator_node
Example #21
Source File: variable.py From chainer with MIT License
def set_creator(self, gen_func):
    """Notifies the variable that the given function is its creator.

    Args:
        gen_func (Function): Function object that creates this variable
            as one of its outputs.

    """
    if self._has_chainerx_array:
        raise RuntimeError(
            'A variable of ChainerX does not provide a creator.')
    self._node.set_creator(gen_func)
Example #22
Source File: trpo.py From chainerrl with MIT License
def _find_old_style_function(outputs):
    """Find old-style functions in the computational graph."""
    found = []
    for v in outputs:
        assert isinstance(
            v, (chainer.Variable, chainer.variable.VariableNode))
        if v.creator is None:
            continue
        if isinstance(v.creator, chainer.Function):
            found.append(v.creator)
        else:
            assert isinstance(v.creator, chainer.FunctionNode)
            found.extend(_find_old_style_function(v.creator.inputs))
    return found
Example #23
Source File: function.py From chainer with MIT License
def node(self):
    """The :class:`FunctionAdapter` object that wraps this Function.

    If the Function does not have a node object, this property
    automatically creates a new one.

    """
    noderef = self._node
    nd = (noderef and noderef()) or self._owned_node
    if nd is not None:
        return nd

    nd = FunctionAdapter(self)
    self._owned_node = nd
    return nd
Example #24
Source File: function.py From chainer with MIT License
def backward(self, inputs, grad_outputs):
    """Applies backprop to output gradient arrays.

    It delegates the procedure to :meth:`backward_cpu` or
    :meth:`backward_gpu` by default. Which it selects is determined by
    the type of the input arrays and output gradient arrays.
    Implementations of :class:`Function` must implement either the
    CPU/GPU methods or this method, if the function is intended to be
    backprop-ed.

    Args:
        inputs: Tuple of input arrays.
        grad_outputs: Tuple of output gradient arrays.

    Returns:
        tuple: Tuple of input gradient arrays. Some or all of them can
        be ``None``, if the function is not differentiable on the
        inputs.

    .. warning::

        Implementations of :class:`Function` must take care that the
        return value is a tuple even if the function returns only one
        array.

    """
    if any(isinstance(x, cuda.ndarray) for x in inputs + grad_outputs):
        return self.backward_gpu(inputs, grad_outputs)
    else:
        return self.backward_cpu(inputs, grad_outputs)
Example #25
Source File: function.py From chainer with MIT License
def function(self):
    """The :class:`Function` object that this adapter is wrapping."""
    func = self._function
    if func is not None:
        return func

    weak_func = self._weak_function
    return weak_func and weak_func()
Example #26
Source File: test_gradient_check.py From chainer with MIT License
def test_fail_function(self):
    # Invalid backward (chainer.Function)
    x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
    gy = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)

    def f(x):
        return self._broken_func_1()(x)

    with self.assertRaises(AssertionError):
        gradient_check.check_backward(f, x, gy)
Example #27
Source File: test_gradient_check.py From chainer with MIT License
def _broken_func_1(self):
    class Broken(chainer.Function):
        def forward(self, inputs):
            x, = inputs
            return (x * x),

        def backward(self, inputs, grad_outputs):
            x, = inputs
            gy, = grad_outputs
            return 3 * x * gy,

    return Broken()
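For contrast, a sketch of check_backward on a version whose backward is correct (the class name and tolerances are illustrative); the deliberately wrong gradient 3 * x * gy above is what makes test_fail_function expect an AssertionError.

import numpy as np
import chainer
from chainer import gradient_check

class Square(chainer.Function):
    def forward(self, inputs):
        x, = inputs
        return x * x,

    def backward(self, inputs, grad_outputs):
        x, = inputs
        gy, = grad_outputs
        return 2 * x * gy,  # correct analytic gradient

x = np.random.uniform(-1, 1, (2, 3)).astype(np.float32)
gy = np.random.uniform(-1, 1, (2, 3)).astype(np.float32)

# Compares the analytic gradient against a numerical one; passes silently.
gradient_check.check_backward(
    lambda v: Square()(v), x, gy, atol=1e-4, rtol=1e-4)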
Example #28
Source File: test_variable.py From chainer with MIT License
def test_set_fresh_creator(self):
    v = chainer.Variable()
    f = chainer.Function()
    v.creator = f
    assert v.creator is f
    assert v.creator_node is f.node
    assert v.rank == 1
Example #29
Source File: test_function.py From chainer with MIT License
def setUp(self):
    y_shape = self.y_shape
    x_shape = self.x_shape
    y1 = make_array(1, y_shape, numpy.float32)
    y2 = make_array(2, y_shape, numpy.float32)
    gx1 = make_array(1, x_shape, numpy.float32)
    gx2 = None
    gy1 = make_array(1, y_shape, numpy.float32)
    gy2 = make_array(1, y_shape, numpy.float32)

    f = chainer.Function()
    f.check_type_forward = mock.MagicMock()
    f.forward_cpu = mock.MagicMock(return_value=(y1, y2))
    f.forward_gpu = mock.MagicMock()
    f.backward_cpu = mock.MagicMock(return_value=(gx1, gx2))
    f.backward_gpu = mock.MagicMock()
    self.f = f

    self.x1 = make_array(0, x_shape, numpy.float32)
    self.x2 = make_array(0, x_shape, numpy.int32)
    self.y1 = y1
    self.y2 = y2
    self.gx1 = gx1
    self.gx2 = gx2
    self.gy1 = gy1
    self.gy2 = gy2
Example #30
Source File: test_function.py From chainer with MIT License
def test_label(self):
    self.assertEqual(self.f.label, 'Function')