Python chainer.optimizers() Examples

The following are 8 code examples of chainer.optimizers(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module chainer, or try the search function.
Example #1
Source File: test_optimizers.py    From chainer with MIT License 6 votes vote down vote up
def test_hyperparams(self):
        """Check that every default hyperparameter can be overridden
        through the optimizer's __init__."""
        # TODO(niboshi): These optimizers' __init__ do not accept some
        # hyperparameters, so they cannot pass this test yet.
        # The test should be fixed.
        incompatible = (
                chainer.optimizers.AdamW,
                chainer.optimizers.AMSGrad,
                chainer.optimizers.AdaBound,
                chainer.optimizers.AMSBound,
        )
        if self.optimizer_impl in incompatible:
            raise unittest.SkipTest(
                'The optimizer is incompatible with this test')

        # Snapshot the defaults from a freshly created optimizer, then
        # re-create it once per hyperparameter with a shifted value.
        self.create()
        defaults = self.optimizer.hyperparam.get_dict()
        for name, default_value in six.iteritems(defaults):
            self.create()
            self.assertEqual(self.get_hyperparam(name), default_value)
            shifted_value = default_value + 0.1
            self.create(**{name: shifted_value})
            self.assertEqual(self.get_hyperparam(name), shifted_value)
Example #2
Source File: test_optimizers.py    From chainer with MIT License 6 votes vote down vote up
def test_adam_w(self, backend_config):
        """AdamW-style decoupled weight decay via Adam(eta, weight_decay_rate)."""
        xp = backend_config.xp
        device = backend_config.device

        # One-element parameter, initialized to 1, with an all-ones gradient.
        link = chainer.Link(x=(1,))
        link.to_device(device)

        optimizer = optimizers.Adam(eta=0.5, weight_decay_rate=0.1)
        optimizer.setup(link)

        link.x.data.fill(1)
        link.x.grad = device.send(xp.ones_like(link.x.data))

        optimizer.update()

        # compare against the value computed with v5 impl
        testing.assert_allclose(
            link.x.data, np.array([0.9495]), atol=1e-7, rtol=1e-7)
Example #3
Source File: test_optimizers.py    From chainer with MIT License 5 votes vote down vote up
def test_all_optimizers_coverage(self):
        """Every Optimizer subclass exported by chainer.optimizers must be
        listed in _all_optimizers (and vice versa)."""
        module = chainer.optimizers
        exported = [
            name for name in dir(module)
            if isinstance(getattr(module, name), type)
            and issubclass(getattr(module, name), chainer.Optimizer)
        ]

        assert sorted(_all_optimizers) == sorted(exported)
Example #4
Source File: test_optimizers.py    From chainer with MIT License 5 votes vote down vote up
def test_optimizer(self, backend_config):
        """Smoke test: build the optimizer named by self.optimizer and run
        a single update on a SimpleChain."""
        device = backend_config.device
        target = SimpleChain(self.shape)
        target.to_device(device)

        cls = getattr(chainer.optimizers, self.optimizer)
        opt = cls(**self.kwargs)
        opt.setup(target)

        data = np.random.randn(*self.shape).astype(np.float32)
        x = chainer.Variable(device.send(data))

        # Just ensures no error occurs. No numerical check is performed.
        opt.update(target, x)
Example #5
Source File: test_optimizers.py    From chainer with MIT License 5 votes vote down vote up
def test_amsgrad(self, backend_config):
        """Two update steps of Adam with amsgrad=True, checked against
        values precomputed with NumPy."""
        device = backend_config.device

        link = chainer.Link(x=(4,))
        param = link.x
        param.data.fill(0)
        link.to_device(device)

        optimizer = optimizers.Adam(alpha=0.01, beta2=0.7, amsgrad=True)
        optimizer.setup(link)

        # Step 1.
        param.grad = device.send(np.array([1, -1, 10, -10], np.float32))
        optimizer.update()
        testing.assert_allclose(
            param.update_rule.state['v'],
            [0.3, 0.3, 30, 30],
            atol=1e-7, rtol=1e-7)
        testing.assert_allclose(
            param.data,
            [-0.01, 0.01, -0.01, 0.01],
            atol=1e-7, rtol=1e-7)

        # Step 2: 'vhat' should hold the running elementwise maximum of 'v'.
        param.grad = device.send(np.array([-10, -10, -1, -1], np.float32))
        optimizer.update()
        testing.assert_allclose(
            param.update_rule.state['v'],
            [30.21, 30.21, 21.3, 21.3],
            atol=1e-7, rtol=1e-7)
        testing.assert_allclose(
            param.update_rule.state['vhat'],
            [30.21, 30.21, 30, 30],
            atol=1e-7, rtol=1e-7)
        testing.assert_allclose(
            param.data,
            # result with NumPy
            [-0.00377703, 0.01745388, -0.01548985, 0.01686232],
            atol=1e-7, rtol=1e-7)
Example #6
Source File: config_utils.py    From voxelnet_chainer with MIT License 5 votes vote down vote up
def create_optimizer(config, model):
    """Build a chainer optimizer from a config dict and set it up on *model*.

    Args:
        config (dict): Must contain 'name' (an attribute of
            ``chainer.optimizers``) and 'args' (kwargs for the optimizer's
            constructor). May contain 'hook', a mapping from hook class
            names (looked up on ``chainer.optimizer``) to the single value
            passed to each hook's constructor.
        model: The chainer link the optimizer is set up on.

    Returns:
        The configured optimizer instance.
    """
    optimizer_cls = getattr(chainer.optimizers, config['name'])
    opt = optimizer_cls(**config['args'])
    opt.setup(model)
    # Membership test directly on the dict; 'in config.keys()' built a
    # needless keys view. Hooks are optional.
    if 'hook' in config:
        for hook_name, hook_arg in config['hook'].items():
            hook_cls = getattr(chainer.optimizer, hook_name)
            opt.add_hook(hook_cls(hook_arg))
    return opt
Example #7
Source File: test_chainer.py    From delira with GNU Affero General Public License v3.0 5 votes vote down vote up
def setUp(self) -> None:
        # Guard clause: without the chainer backend there is nothing to set
        # up, so leave the fixture empty.
        if not check_for_chainer_backend():
            return

        import chainer
        import chainer.link
        import chainer.links
        import chainer.functions
        import chainer.optimizers
        from delira.models.backends.chainer.data_parallel import \
            DataParallelChainerOptimizer, \
            DataParallelChainerNetwork
        from delira.models.backends.chainer.abstract_network import \
            AbstractChainerNetwork

        # creating a really simple model to test dataparallel behavior
        class SimpleModel(AbstractChainerNetwork):
            def __init__(self):
                super().__init__()

                with self.init_scope():
                    self.dense_1 = chainer.links.Linear(3, 32)
                    self.dense_2 = chainer.links.Linear(32, 2)

            def forward(self, x):
                hidden = chainer.functions.relu(self.dense_1(x))
                return self.dense_2(hidden)

        self.model = DataParallelChainerNetwork(
            SimpleModel(), devices=["@numpy", "@numpy"])
        self.optimizer = DataParallelChainerOptimizer.from_optimizer_class(
            chainer.optimizers.Adam)
        self.optimizer.setup(self.model)
Example #8
Source File: builder.py    From lencon with MIT License 5 votes vote down vote up
def _build_optimizer(self, config):
        """Instantiate the optimizer named by config['name']; every other
        config entry is forwarded to its constructor as a float kwarg."""
        kwargs = {key: float(val)
                  for key, val in config.items() if key != 'name'}
        optimizer_cls = getattr(chainer.optimizers, config['name'])
        return optimizer_cls(**kwargs)