Python chainer.Link() Examples
The following are 30 code examples of chainer.Link(), drawn from the original projects and source files noted above each example. You may also want to check out the other available functions and classes of the chainer module.
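As a quick orientation before the examples, the following minimal sketch (not taken from any of the projects below) shows the core chainer.Link API they exercise: parameters are registered inside an init_scope() block, and persistent values are added with add_persistent().

import numpy as np
import chainer

# A small, self-contained illustration of the chainer.Link basics.
link = chainer.Link()
with link.init_scope():
    link.w = chainer.Parameter(np.zeros((2, 3), dtype=np.float32))  # initialized
    link.b = chainer.Parameter()                                    # uninitialized until first use
link.add_persistent('count', 0)                                     # persistent (non-trainable) value

for name, param in link.namedparams():
    print(name, None if param.array is None else param.array.shape)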
Example #1
Source File: test_link.py From chainer with MIT License | 6 votes |
def test_serialize(self):
    l1 = chainer.Link()
    with l1.init_scope():
        l1.y = chainer.Parameter(shape=(1, 1))

    l2 = chainer.Link()
    with l2.init_scope():
        l2.x = chainer.Parameter(0, 2)
    c1 = chainer.ChainList(l1, l2)
    mocks = {'0': mock.MagicMock(), '1': mock.MagicMock()}
    serializer = mock.MagicMock()
    serializer.__getitem__.side_effect = lambda k: mocks[k]
    serializer.return_value = None
    mocks['0'].return_value = None
    mocks['1'].return_value = None
    c1.serialize(serializer)
    self.assertEqual(serializer.call_count, 0)
    self.assertEqual(serializer.__getitem__.call_count, 2)
    serializer.__getitem__.assert_any_call('0')
    serializer.__getitem__.assert_any_call('1')
    mocks['0'].assert_called_with('y', l1.y.data)
    mocks['1'].assert_called_with('x', l2.x.data)
Example #2
Source File: test_link.py From chainer with MIT License | 6 votes |
def test_deserialize(self, backend_config):
    # Deserializes uninitialized parameters into uninitialized ones.
    call_record = []

    def serializer(key, value):
        call_record.append((key, value))
        return None  # to be uninitialized

    l = chainer.Link()
    with l.init_scope():
        l.x = chainer.Parameter()  # uninitialized
    l.to_device(backend_config.device)

    l.serialize(serializer)

    # Link is kept uninitialized
    self.assertIsNone(l.x.array)

    # Check inputs to the serializer
    self.assertEqual(len(call_record), 1)
    self.assertEqual(call_record[0][0], 'x')
    self.assertIs(call_record[0][1], None)
Example #3
Source File: test_link.py From chainer with MIT License | 6 votes |
def setUp(self):
    x_shape_0 = 2
    x_shape_1 = numpy.int64(3)
    self.link = chainer.Link(x=((x_shape_0, x_shape_1), 'd'),
                             u=(None, 'd'))
    with self.link.init_scope():
        self.link.y = chainer.Parameter(shape=(2,))
        self.link.v = chainer.Parameter()
    self.p = numpy.array([1, 2, 3], dtype='f')
    self.link.add_persistent('p', self.p)
    self.link.name = 'a'
    self.link.x.update_rule = chainer.UpdateRule()
    self.link.x.update_rule.enabled = False
    self.link.u.update_rule = chainer.UpdateRule()
    if cuda.available:
        self.current_device_id = cuda.cupy.cuda.get_device_id()
Example #4
Source File: test_link.py From chainer with MIT License | 6 votes |
def test_serialize(self, backend_config):
    call_record = []

    def serializer(key, value):
        call_record.append((key, value))
        return value

    l = chainer.Link()
    with l.init_scope():
        l.x = chainer.Parameter()  # uninitialized
    l.to_device(backend_config.device)

    l.serialize(serializer)

    # Link is kept uninitialized
    self.assertIsNone(l.x.array)

    # Check inputs to the serializer
    self.assertEqual(len(call_record), 1)
    self.assertEqual(call_record[0][0], 'x')
    self.assertIs(call_record[0][1], None)
Example #5
Source File: test_optimizers.py From chainer with MIT License | 6 votes |
def test_adam_w(self, backend_config):
    xp = backend_config.xp
    device = backend_config.device

    link = chainer.Link(x=(1,))
    link.to_device(device)

    opt = optimizers.Adam(eta=0.5, weight_decay_rate=0.1)
    opt.setup(link)

    link.x.data.fill(1)
    link.x.grad = device.send(xp.ones_like(link.x.data))
    opt.update()

    # compare against the value computed with v5 impl
    testing.assert_allclose(link.x.data, np.array([0.9495]),
                            atol=1e-7, rtol=1e-7)
Example #6
Source File: link_hook.py From chainer with MIT License | 6 votes |
def forward_postprocess(
        self, args: _ForwardPostprocessCallbackArgs) -> None:
    """Callback function invoked after a forward call of a link.

    Args:
        args: Callback data. It has the following attributes:

            * link (:class:`~chainer.Link`)
                Link object.
            * forward_name (:class:`str`)
                Name of the forward method.
            * args (:class:`tuple`)
                Non-keyword arguments given to the forward method.
            * kwargs (:class:`dict`)
                Keyword arguments given to the forward method.
            * out
                Return value of the forward method.
    """
    pass
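The callback above belongs to chainer.LinkHook. For context, a minimal sketch of a user-defined hook that overrides forward_postprocess might look like the following; the hook class and its bookkeeping are illustrative, not part of the project above.

import numpy as np
import chainer
import chainer.links as L

class CallLoggerHook(chainer.LinkHook):
    # Hypothetical hook: record which link methods were called.
    name = 'CallLoggerHook'

    def __init__(self):
        self.calls = []

    def forward_postprocess(self, args):
        # args.link and args.forward_name are filled in by Chainer.
        self.calls.append((args.link.__class__.__name__, args.forward_name))

layer = L.Linear(3, 2)
hook = CallLoggerHook()
with hook:  # registers the hook globally for this block
    layer(np.zeros((1, 3), dtype=np.float32))
print(hook.calls)  # e.g. [('Linear', 'forward')]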
Example #7
Source File: dqn.py From chainerrl with MIT License | 6 votes |
def _batch_reset_recurrent_states_when_episodes_end(
        model, batch_done, batch_reset, recurrent_states):
    """Reset recurrent states when episodes end.

    Args:
        model (chainer.Link): Model that implements `StatelessRecurrent`.
        batch_done (array-like of bool): True iff episodes are terminal.
        batch_reset (array-like of bool): True iff episodes will be reset.
        recurrent_states (object): Recurrent state.

    Returns:
        object: New recurrent states.
    """
    indices_that_ended = [
        i for i, (done, reset) in enumerate(zip(batch_done, batch_reset))
        if done or reset]
    if indices_that_ended:
        return model.mask_recurrent_state_at(
            recurrent_states, indices_that_ended)
    else:
        return recurrent_states
Example #8
Source File: optimizer.py From chainer with MIT License | 6 votes |
def use_cleargrads(self, use=True):
    """Enables or disables use of :func:`~chainer.Link.cleargrads` in `update`.

    Args:
        use (bool): If ``True``, this function enables use of
            `cleargrads`. If ``False``, disables use of `cleargrads`
            (`zerograds` is used).

    .. deprecated:: v2.0
        Note that :meth:`update` calls :meth:`~Link.cleargrads` by default.
        :meth:`~Link.cleargrads` is more efficient than
        :meth:`~Link.zerograds`, so one does not have to call
        :meth:`use_cleargrads`. This method remains for backward
        compatibility.

    """
    warnings.warn(
        'GradientMethod.use_cleargrads is deprecated.',
        DeprecationWarning)

    self._use_cleargrads = use
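Since use_cleargrads is deprecated, the usual modern pattern is simply to call Link.cleargrads() before backpropagation. A minimal sketch with a made-up model and data:

import numpy as np
import chainer.functions as F
import chainer.links as L
from chainer import optimizers

model = L.Linear(3, 2)
optimizer = optimizers.SGD(lr=0.01)
optimizer.setup(model)

x = np.random.rand(4, 3).astype(np.float32)
t = np.zeros(4, dtype=np.int32)

model.cleargrads()                       # preferred over zerograds()
loss = F.softmax_cross_entropy(model(x), t)
loss.backward()
optimizer.update()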
Example #9
Source File: async_.py From chainerrl with MIT License | 6 votes |
def extract_params_as_shared_arrays(link):
    assert isinstance(link, chainer.Link)
    shared_arrays = {}
    for param_name, param in link.namedparams():
        typecode = param.array.dtype.char
        shared_arrays[param_name] = mp.RawArray(typecode, param.array.ravel())

    for persistent_name, persistent in chainerrl.misc.namedpersistent(link):
        if isinstance(persistent, np.ndarray):
            typecode = persistent.dtype.char
            shared_arrays[persistent_name] = mp.RawArray(
                typecode, persistent.ravel())
        else:
            assert np.isscalar(persistent)
            # Wrap by a 1-dim array because multiprocessing.RawArray does not
            # accept a 0-dim array.
            persistent_as_array = np.asarray([persistent])
            typecode = persistent_as_array.dtype.char
            shared_arrays[persistent_name] = mp.RawArray(
                typecode, persistent_as_array)
    return shared_arrays
Example #10
Source File: test_link.py From chainer with MIT License | 6 votes |
def setUp(self):
    self.link = chainer.Link()
    shape = (2, 2)
    dtype = numpy.float32
    y_array = numpy.random.rand(*shape).astype(dtype)
    pa_array = numpy.random.rand(*shape).astype(dtype)
    ps_scalar = 2.4

    with self.link.init_scope():
        # Initialized parameter
        self.link.y = chainer.Parameter(y_array)
        # Uninitialized parameter
        self.link.v = chainer.Parameter()
        # Persistent ndarray
        self.link.add_persistent('pa', pa_array)
        # Persistent scalar
        self.link.add_persistent('ps', ps_scalar)
    self.y_array = y_array
    self.pa_array = pa_array
    self.ps_scalar = ps_scalar

    if cuda.available:
        self.current_device_id = cuda.cupy.cuda.get_device_id()
Example #11
Source File: test_link.py From chainer with MIT License | 6 votes |
def setUp(self):
    self.l1 = chainer.Link()
    with self.l1.init_scope():
        self.l1.x = chainer.Parameter(shape=(2, 3))
        self.l1.y = chainer.Parameter()
    self.l2 = chainer.Link()
    with self.l2.init_scope():
        self.l2.x = chainer.Parameter(shape=2)
    self.l3 = chainer.Link()
    with self.l3.init_scope():
        self.l3.x = chainer.Parameter(shape=3)
    self.l4 = chainer.Link()
    self.l5 = chainer.Link()
    self.l6 = chainer.Link()
    self.c1 = chainer.ChainList(self.l1)
    self.c1.add_link(self.l2)
    self.c2 = chainer.ChainList(self.c1)
    self.c2.append(self.l3)
    self.c3 = chainer.ChainList(self.l4)
Example #12
Source File: async_.py From chainerrl with MIT License | 6 votes |
def set_shared_params(a, b):
    """Set shared params (and persistent values) to a link.

    Args:
        a (chainer.Link): link whose params are to be replaced
        b (dict): dict that consists of (param_name, multiprocessing.Array)
    """
    assert isinstance(a, chainer.Link)
    remaining_keys = set(b.keys())
    for param_name, param in a.namedparams():
        if param_name in b:
            shared_param = b[param_name]
            param.array = np.frombuffer(
                shared_param, dtype=param.dtype).reshape(param.shape)
            remaining_keys.remove(param_name)
    for persistent_name, _ in chainerrl.misc.namedpersistent(a):
        if persistent_name in b:
            _set_persistent_values_recursively(
                a, persistent_name, b[persistent_name])
            remaining_keys.remove(persistent_name)
    assert not remaining_keys
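Examples #9 and #12 are typically used together in ChainerRL's asynchronous training: one process exports its parameters as shared-memory buffers, and each worker process binds its own copy of the model to those buffers. A rough sketch, assuming the two helpers shown above are importable from chainerrl.misc.async_:

import copy

import chainer.links as L
from chainerrl.misc.async_ import (
    extract_params_as_shared_arrays, set_shared_params)

shared_model = L.Linear(4, 2)
shared_arrays = extract_params_as_shared_arrays(shared_model)

# In each worker process: make a local copy and point it at the shared buffers.
local_model = copy.deepcopy(shared_model)
set_shared_params(local_model, shared_arrays)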
Example #13
Source File: test_link.py From chainer with MIT License | 6 votes |
def setUp(self):
    self.link = chainer.Link()
    shape = (2, 2)
    dtype = numpy.float32
    y_array = numpy.random.rand(*shape).astype(dtype)
    pa_array = numpy.random.rand(*shape).astype(dtype)
    ps_scalar = 2.4

    with self.link.init_scope():
        # Initialized parameter
        self.link.y = chainer.Parameter(y_array)
        # Uninitialized parameter
        self.link.v = chainer.Parameter()
        # Persistent ndarray
        self.link.add_persistent('pa', pa_array)
        # Persistent scalar
        self.link.add_persistent('ps', ps_scalar)
    self.y_array = y_array
    self.pa_array = pa_array
    self.ps_scalar = ps_scalar
Example #14
Source File: test_link.py From chainer with MIT License | 6 votes |
def setUp(self):
    self.link = chainer.Link()
    shape = (2, 2)
    dtype = numpy.float32
    y_array = numpy.random.rand(*shape).astype(dtype)
    pa_array = numpy.random.rand(*shape).astype(dtype)
    ps_scalar = 2.4

    with self.link.init_scope():
        # Initialized parameter
        self.link.y = chainer.Parameter(y_array)
        # Uninitialized parameter
        self.link.v = chainer.Parameter()
        # Persistent ndarray
        self.link.add_persistent('pa', pa_array)
        # Persistent scalar
        self.link.add_persistent('ps', ps_scalar)
    self.y_array = y_array
    self.pa_array = pa_array
    self.ps_scalar = ps_scalar
Example #15
Source File: test_link.py From chainer with MIT License | 6 votes |
def test_copyparams(self):
    l1 = chainer.Link()
    with l1.init_scope():
        l1.x = chainer.Parameter(shape=(2, 3))
        l1.y = chainer.Parameter()
    l2 = chainer.Link()
    with l2.init_scope():
        l2.x = chainer.Parameter(shape=2)
    l3 = chainer.Link()
    with l3.init_scope():
        l3.x = chainer.Parameter(shape=3)
    c1 = chainer.ChainList(l1, l2)
    c2 = chainer.ChainList(c1, l3)
    l1.x.data.fill(0)
    l2.x.data.fill(1)
    l3.x.data.fill(2)

    self.c2.copyparams(c2)

    numpy.testing.assert_array_equal(self.l1.x.data, l1.x.data)
    numpy.testing.assert_array_equal(self.l2.x.data, l2.x.data)
    numpy.testing.assert_array_equal(self.l3.x.data, l3.x.data)
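Outside of ChainList, the same copyparams mechanism works between any two Links whose parameter names match; a minimal sketch:

import numpy as np
import chainer

src = chainer.Link()
with src.init_scope():
    src.w = chainer.Parameter(np.ones((2, 3), dtype=np.float32))

dst = chainer.Link()
with dst.init_scope():
    dst.w = chainer.Parameter(np.zeros((2, 3), dtype=np.float32))

dst.copyparams(src)  # copies parameter data by name
assert np.array_equal(dst.w.array, src.w.array)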
Example #16
Source File: link_hook.py From chainer with MIT License | 5 votes |
def __init__(
        self,
        link: 'chainer.link.Link',
        forward_name: str,
        args: tp.Tuple[tp.Any, ...],
        kwargs: tp.Dict[str, tp.Any]
) -> None:
    self.link = link
    self.forward_name = forward_name
    self.args = args
    self.kwargs = kwargs
Example #17
Source File: spectral_normalization.py From chainer with MIT License | 5 votes |
def _prepare_parameters(self, link, input_variable=None):
    """Prepare one buffer and one parameter.

    Args:
        link (:class:`~chainer.Link`): Link to normalize spectrally.
        input_variable (:class:`~chainer.Variable`):
            The first minibatch to initialize weight.

    """
    if getattr(link, self.weight_name).array is None:
        if input_variable is not None:
            link._initialize_params(input_variable.shape[1])
    initialW = getattr(link, self.weight_name)
    if initialW.shape[self.axis] == 0:
        raise ValueError(
            'Expect {}.shape[{}] > 0'.format(self.weight_name, self.axis)
        )
    u = link.xp.random.normal(
        size=(initialW.shape[self.axis],)).astype(dtype=initialW.dtype)
    setattr(link, self.vector_name, u)
    link.register_persistent(self.vector_name)

    if self.use_gamma:
        # Initialize the scaling parameter with the max singular value.
        weight_matrix = self.reshape_W(initialW.array)
        # TODO(crcrpar): Remove this when chainerx supports SVD.
        device = link.device
        if device.xp is chainerx:
            fallback_device = device.fallback_device
            weight_matrix_ = fallback_device.send(weight_matrix)
            with chainer.using_device(fallback_device):
                _, s_, _ = fallback_device.xp.linalg.svd(weight_matrix_)
            s = device.send(s_)
        else:
            _, s, _ = link.xp.linalg.svd(weight_matrix)
        s0 = chainer.utils.force_array(s[0])
        with link.init_scope():
            link.gamma = variable.Parameter(s0)
    self._initialized = True
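Example #17 is internal to Chainer's SpectralNormalization link hook; in user code the hook is normally just attached to a link, roughly as follows (the layer and its sizes are arbitrary):

import numpy as np
import chainer
import chainer.links as L

layer = L.Convolution2D(3, 8, ksize=3)
layer.add_hook(chainer.link_hooks.SpectralNormalization())

x = np.random.rand(1, 3, 16, 16).astype(np.float32)
y = layer(x)  # the weight is spectrally normalized on each forward pass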
Example #18
Source File: sequential.py From chainer with MIT License | 5 votes |
def copy(self, mode='share'):
    ret = Sequential()
    for layer in self:
        if isinstance(layer, _link.Link):
            ret.append(layer.copy(mode))
        else:
            ret.append(copy.copy(layer))
    return ret
Example #19
Source File: sequential.py From chainer with MIT License | 5 votes |
def copyparams(self, link, copy_persistent=True):
    if not isinstance(link, Sequential):
        raise ValueError('Objects other than Sequential object cannot be '
                         'copied to a Sequential object.')
    for idx, child in enumerate(self):
        if isinstance(child, _link.Link):
            child.copyparams(link[idx], copy_persistent)
Example #20
Source File: sequential.py From chainer with MIT License | 5 votes |
def count_by_layer_type(self, type_name):
    """Count the number of layers by layer type.

    This method counts the number of layers which have the name given by
    the argument ``type_name``. For example, if you want to know the
    number of :class:`~links.Linear` layers included in this model,
    ``type_name`` should be ``Linear``. If you want to know the number of
    :class:`~Function` classes or user-defined functions which have a
    specific name, ``type_name`` should be the function name, e.g.,
    ``relu`` or ``reshape``, etc.

    Args:
        type_name (str): The class or function name of a layer you want to
            enumerate.

    """
    num = 0
    for layer in self._layers:
        if isinstance(layer, _link.Link):
            if layer.__class__.__name__ == type_name:
                num += 1
        else:
            if layer.__name__ == type_name:
                num += 1
    return num
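A short usage sketch of count_by_layer_type on a hand-built Sequential (the layer sizes are arbitrary):

import chainer
import chainer.functions as F
import chainer.links as L

model = chainer.Sequential(
    L.Linear(10, 20),
    F.relu,
    L.Linear(20, 5),
)
print(model.count_by_layer_type('Linear'))  # 2
print(model.count_by_layer_type('relu'))    # 1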
Example #21
Source File: test_optimizers.py From chainer with MIT License | 5 votes |
def setUp(self):
    self.target = chainer.Link()
    with self.target.init_scope():
        self.target.w = chainer.Parameter()
Example #22
Source File: sequential.py From chainer with MIT License | 5 votes |
def remove_by_layer_type(self, type_name):
    """Remove layers by layer type.

    This method removes layers from the Sequential object by the layer's
    class name or function name. If you want to remove a :class:`~Link`,
    the argument ``type_name`` should be its class name, e.g.,
    :class:`~links.Linear` or :class:`~links.Convolution2D`, etc.
    If you want to remove a :class:`~Function` class or any other callable
    objects, ``type_name`` should be the function name, e.g., ``relu`` or
    ``reshape``, etc.

    Args:
        type_name (str): The name of a layer you want to remove.

    """
    names = []
    for layer in self:
        if isinstance(layer, _link.Link):
            name = layer.__class__.__name__
        else:
            name = layer.__name__
        names.append((name, layer))
    for _name, _layer in names:
        if type_name == _name:
            self.remove(_layer)
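And its counterpart in use, continuing the same kind of Sequential model:

import chainer
import chainer.functions as F
import chainer.links as L

model = chainer.Sequential(L.Linear(10, 20), F.relu, L.Linear(20, 5))
model.remove_by_layer_type('relu')          # drop every relu layer
print(model.count_by_layer_type('relu'))    # 0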
Example #23
Source File: test_optimizers_by_linear_model.py From chainer with MIT License | 5 votes |
def test_initialize(self):
    skip, msg = self.skip_loss_scaling()
    if skip:
        return unittest.SkipTest(msg)

    model = self.model.model
    assert isinstance(model, chainer.Link)
    optimizer = self.create()
    optimizer.setup(model)
    _optimizer_loss_scaling(optimizer, self.loss_scaling)

    msg = 'optimization target must be a link'
    with six.assertRaisesRegex(self, TypeError, msg):
        optimizer.setup('xxx')
Example #24
Source File: test_link_hook.py From chainer with MIT License | 5 votes |
def forward_preprocess(self, args):
    assert isinstance(args.link, chainer.Link)
    assert isinstance(args.forward_name, str)
    assert isinstance(args.args, tuple)
    assert isinstance(args.kwargs, dict)
    assert isinstance(str(args), str)
    assert isinstance(repr(args), str)
    self.forward_preprocess_args.append((_process_time(), args))
Example #25
Source File: test_link_hook.py From chainer with MIT License | 5 votes |
def deleted(self, link):
    assert link is None or isinstance(link, chainer.Link)
    self.deleted_args.append((_process_time(), link))
Example #26
Source File: test_link_hook.py From chainer with MIT License | 5 votes |
def added(self, link):
    assert link is None or isinstance(link, chainer.Link)
    self.added_args.append((_process_time(), link))
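The added/deleted callbacks in Examples #25 and #26 fire when a hook is registered on, or removed from, a specific link. A minimal sketch of that registration API (the hook class is illustrative):

import chainer
import chainer.links as L

class NoisyHook(chainer.LinkHook):
    name = 'NoisyHook'

    def added(self, link):
        # link is the Link the hook was attached to (None for global hooks).
        print('added to', link)

    def deleted(self, link):
        print('deleted from', link)

layer = L.Linear(3, 2)
layer.add_hook(NoisyHook())      # triggers added(layer)
layer.delete_hook('NoisyHook')   # triggers deleted(layer)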
Example #27
Source File: test_standard_updater.py From chainer with MIT License | 5 votes |
def setUp(self):
    self.target = chainer.Link()
    self.optimizer = DummyOptimizer()
    self.optimizer.setup(self.target)
Example #28
Source File: test_standard_updater.py From chainer with MIT License | 5 votes |
def test_device(
        self,
        model_initial_backend_config,
        model_backend_config,
        input_backend_config):
    model_initial_device = model_initial_backend_config.device
    device = model_backend_config.device
    input_device = input_backend_config.device

    model = chainer.Link()
    model.to_device(model_initial_device)
    optimizer = DummyOptimizer()
    optimizer.setup(model)
    iterator = DummyIterator([numpy.array(1), numpy.array(2)])

    updater = training.updaters.StandardUpdater(
        iterator, optimizer, device=device, input_device=input_device)

    assert updater.device is device
    assert updater.input_device is input_device

    # Check the model device.
    assert model.device == device

    updater.update_core()

    assert optimizer.update.call_count == 1
    args, kwargs = optimizer.update.call_args
    assert len(args) == 2
    assert len(kwargs) == 0
    loss, v1 = args

    # Check the input device.
    assert backend.get_device_from_array(v1) == input_device
Example #29
Source File: test_standard_updater.py From chainer with MIT License | 5 votes |
def setUp(self):
    self.target = chainer.Link()
    self.iterator = DummyIterator([(numpy.array(1), numpy.array(2))])
    self.optimizer = DummyOptimizer()
    self.optimizer.setup(self.target)
    self.updater = training.updaters.StandardUpdater(
        self.iterator, self.optimizer)
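Outside of the test doubles used in these setUp methods, a StandardUpdater is normally wired to a real iterator and optimizer; a toy sketch with made-up data and sizes:

import numpy as np
import chainer.links as L
from chainer import iterators, optimizers, training

dataset = [(np.random.rand(3).astype(np.float32), np.int32(i % 2))
           for i in range(10)]
model = L.Classifier(L.Linear(3, 2))
optimizer = optimizers.SGD()
optimizer.setup(model)
iterator = iterators.SerialIterator(dataset, batch_size=5)

updater = training.updaters.StandardUpdater(iterator, optimizer)
trainer = training.Trainer(updater, (1, 'epoch'))
trainer.run()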
Example #30
Source File: test_link.py From chainer with MIT License | 5 votes |
def setUp(self):
    self.array = numpy.array([1, 2, 3], dtype=numpy.float32)
    self.serializer = mock.MagicMock(return_value=self.array)

    link = chainer.Link()
    with link.init_scope():
        link.x = chainer.Parameter()
        link.y = chainer.Parameter()
    link.add_persistent('z', None)
    self.link = link
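In practice, the serializer mocked in these tests is usually one of Chainer's NPZ or HDF5 serializers; a minimal save/load sketch:

import numpy as np
import chainer
from chainer import serializers

link = chainer.Link()
with link.init_scope():
    link.x = chainer.Parameter(np.arange(3, dtype=np.float32))

serializers.save_npz('link.npz', link)    # writes the parameter to an .npz file

fresh = chainer.Link()
with fresh.init_scope():
    fresh.x = chainer.Parameter()          # uninitialized until loaded
serializers.load_npz('link.npz', fresh)
assert np.array_equal(fresh.x.array, link.x.array)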