Python chainer.backend.get_array_module() Examples

The following are 30 code examples of chainer.backend.get_array_module(). All of them are taken from the Chainer project; the source file for each example is noted above it. You may also want to check out the other functions and classes available in the chainer.backend module.
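Before the examples, here is a minimal sketch of the dispatch pattern this function enables: it returns the array module (numpy or cupy) that corresponds to its array arguments, so one code path can serve both CPU and GPU arrays. The scaled_exp helper below is our own illustration, not part of Chainer:

import numpy as np
from chainer import backend

def scaled_exp(x):
    # Resolve the array module (numpy or cupy) from the input,
    # then use it for every subsequent array operation.
    xp = backend.get_array_module(x)
    return xp.exp(x) * x.dtype.type(0.5)

y = scaled_exp(np.array([0.0, 1.0], dtype=np.float32))
# The same function works unchanged on a cupy array when a GPU is present.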
Example #1
Source File: test_variable.py    From chainer with MIT License
def check_traceback(self, x_data):
        xp = backend.get_array_module(x_data)

        class DummyFunction(chainer.Function):
            label = 'dummy_function'

            def forward(self, inputs):
                return xp.array(1, np.float32),

            def backward(self, inputs, grads):
                return xp.array([1, 2], np.float32),

        x = chainer.Variable(x_data)
        line = inspect.currentframe().f_lineno + 1
        y = DummyFunction()(x)  # `line` is THIS line
        try:
            y.backward()
            self.fail()
        except ValueError as e:
            assert 'Stacktrace' in str(e)
            assert 'line %d' % line in str(e) 
Example #2
Source File: test_bilinear.py    From chainer with MIT License
def forward_expected(self, inputs):
        if self.test_partial:
            e1, e2, W = inputs
            V1 = None
            V2 = None
            b = None
        else:
            e1, e2, W, V1, V2, b = inputs

        e1 = e1.reshape(e1.shape[0], -1)
        e2 = e2.reshape(e2.shape[0], -1)
        xp = backend.get_array_module(e1)
        # Bilinear form: y[i, l] = sum_{j,k} e1[i, j] * e2[i, k] * W[j, k, l]
        y_expect = xp.einsum('ij,ik,jkl->il', e1, e2, W)
        flags = V1 is None, V2 is None, b is None
        if any(flags):
            if not all(flags):
                raise ValueError(
                    'Test either all or none of the optional parameters.')
        else:
            y_expect += e1.dot(V1)
            y_expect += e2.dot(V2)
            y_expect += b
        return y_expect, 
Example #3
Source File: test_simplified_dropconnect.py    From chainer with MIT License
def check_backward(self, x_data, W_data, b_data, y_grad):
        args = x_data, W_data
        if b_data is not None:
            args += b_data,

        if self.use_batchwise_mask:
            mask_shape = (x_data.shape[0],) + W_data.shape
        else:
            mask_shape = W_data.shape

        xp = backend.get_array_module(x_data)
        mask = xp.random.rand(*mask_shape) >= self.ratio

        def f(x, W, b=None):
            return functions.simplified_dropconnect(
                x, W, b, self.ratio, self.train, mask,
                self.use_batchwise_mask)

        gradient_check.check_backward(
            f, args, y_grad, eps=1e-2, **self.check_backward_options) 
Example #4
Source File: collective_communication.py    From chainer with MIT License
def backward(self, inputs, grad_outputs):
        xp = backend.get_array_module(*inputs)
        gy, = grad_outputs
        gy_dtype = gy.dtype

        # convert to float32 for communication
        if numpy.float16 == gy_dtype:
            gy = gy.astype(numpy.float32)

        gxs = self.comm.gather(gy, self.root)

        if self.comm.rank == self.root:

            # convert back
            if numpy.float16 == gy_dtype:
                gxs = tuple([item.astype(gy_dtype) for item in gxs])
            return gxs

        else:
            # Non-root processes need to maintain input/output shapes.
            if inputs == ():
                dummy_var = tuple([xp.array([], dtype=xp.float32)])
            else:
                dummy_var = tuple([xp.zeros_like(x) for x in inputs])
            return dummy_var 
Example #5
Source File: collective_communication.py    From chainer with MIT License
def forward(self, inputs):
        xp = backend.get_array_module(*inputs)
        x, = inputs

        # convert to float32 for communication
        x_dtype = x.dtype
        if numpy.float16 == x_dtype:
            x = x.astype(numpy.float32)
        ys = self.comm.gather(x, self.root)

        if self.comm.rank == self.root:

            # convert back
            if numpy.float16 == x_dtype:
                ys = tuple([item.astype(x_dtype) for item in ys])
            return ys

        else:
            # Return an empty variable, which serves as "delegate_variable."
            return xp.array([], dtype=x_dtype), 
Example #6
Source File: collective_communication.py    From chainer with MIT License
def backward(self, inputs, grad_outputs):
        gx, = grad_outputs
        gx_dtype = gx.dtype
        # convert to float32 for communication
        if numpy.float16 == gx_dtype:
            gx = gx.astype(numpy.float32)

        gxs = self.comm.gather(gx, self.root)

        if self.comm.rank == self.root:
            xp = backend.get_array_module(*gxs)
            gxs = xp.stack(gxs)
            _sum = gxs.sum(axis=0),

            # convert back
            if numpy.float16 == gx_dtype:
                _sum = tuple([item.astype(gx_dtype) for item in _sum])
            return _sum
        else:
            return None, 
Example #7
Source File: collective_communication.py    From chainer with MIT License
def backward(self, inputs, grad_outputs):
        xp = backend.get_array_module(*inputs)
        grad_dtype = grad_outputs[0].dtype

        # convert to float32 for communication
        if numpy.float16 == grad_dtype:
            grad_outputs = tuple([item.astype(numpy.float32)
                                  for item in grad_outputs])
        gxs = self.comm.alltoall(grad_outputs)

        gx = xp.stack(gxs).sum(axis=0)

        # convert back
        if numpy.float16 == grad_dtype:
            gx = gx.astype(grad_dtype)
        return gx, 
Example #8
Source File: test_zoneout.py    From chainer with MIT License
def check_double_backward(
            self, h_data, x_data, y_grad, h_grad_grad, x_grad_grad):
        xp = backend.get_array_module(h_data)
        flag_x = xp.random.rand(*x_data.shape)

        def f(h, x):
            # As forward computation is executed multiple times in
            # check_double_backward, use a fixed flag.
            xp_str = 'numpy' if xp is numpy else 'cupy'
            with mock.patch(
                    '{}.random.rand'.format(xp_str),
                    return_value=flag_x) as mock_rand:
                y = functions.zoneout(h, x, self.ratio)
                mock_rand.assert_called_once_with(*x.shape)
            return y

        gradient_check.check_double_backward(
            f, (h_data, x_data), y_grad, (h_grad_grad, x_grad_grad),
            dtype=numpy.float64) 
Example #9
Source File: test_shift.py    From chainer with MIT License
def test_forward_consistency(self):
        x_data = self.x
        xp = backend.get_array_module(x_data)

        if not self.c_contiguous:
            x_data = xp.asfortranarray(x_data)
            self.assertFalse(x_data.flags.c_contiguous)

        x_cpu = chainer.Variable(x_data)
        y_cpu = shift.shift(
            x_cpu, ksize=self.ksize, dilate=self.dilate)

        x_gpu = chainer.Variable(cuda.to_gpu(x_data))
        y_gpu = shift.shift(
            x_gpu, ksize=self.ksize, dilate=self.dilate)

        testing.assert_allclose(
            y_cpu.data, y_gpu.data.get(), atol=5e-4, rtol=5e-3) 
Example #10
Source File: test_optimizer.py    From chainer with MIT License
def test_update(self, backend_config):
        if backend_config.xp is chainerx:
            # ChainerX performs loss scaling in its own backward
            # method, so the optimizer should not divide the
            # parameters back. This test does not actually build a
            # ChainerX computation graph, so no loss scaling happens.
            self.optimizer.lr = 1.0
        target = self.target
        optimizer = self.optimizer
        target.to_device(backend_config.device)
        optimizer.setup(target)
        optimizer.update()
        xp = backend.get_array_module(target[0].param)
        expected_data = xp.zeros(self.shape, dtype=self.dtype)
        rtol, atol = 1e-4, 1e-5
        if self.dtype is np.float16:
            rtol, atol = 1e-1, 1e-2
        for i in range(2):
            testing.assert_allclose(
                target[i].param.data, expected_data,
                rtol=rtol, atol=atol) 
Example #11
Source File: test_variable.py    From chainer with MIT License
def check_type_mismatch(self, x_data, retain):
        xp = backend.get_array_module(x_data)

        class DummyFunction(chainer.Function):
            label = 'dummy_function'

            def forward(self, inputs):
                if not retain:
                    self.retain_inputs(())
                return xp.array(1, np.float32),

            def backward(self, inputs, grads):
                return [1]

        x = chainer.Variable(x_data)
        y = DummyFunction()(x)
        with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
            y.backward() 
Example #12
Source File: test_variable.py    From chainer with MIT License
def check_dtype_mismatch(self, x_data, retain):
        xp = backend.get_array_module(x_data)

        class DummyFunction(chainer.Function):
            label = 'dummy_function'

            def forward(self, inputs):
                if not retain:
                    self.retain_inputs(())
                return xp.array(1, np.float32),

            def backward(self, inputs, grads):
                return xp.array([1], np.int32),

        x = chainer.Variable(x_data)
        y = DummyFunction()(x)
        with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
            y.backward() 
Example #13
Source File: test_variable.py    From chainer with MIT License
def check_shape_mismatch(self, x_data, retain):
        xp = backend.get_array_module(x_data)

        class DummyFunction(chainer.Function):
            label = 'dummy_function'

            def forward(self, inputs):
                if not retain:
                    self.retain_inputs(())
                return xp.array(1, np.float32),

            def backward(self, inputs, grads):
                return xp.array([1, 2], np.float32),

        x = chainer.Variable(x_data)
        y = DummyFunction()(x)
        with six.assertRaisesRegex(self, ValueError, 'dummy_function'):
            y.backward() 
Example #14
Source File: distribution.py    From chainer with MIT License
def xp(self):
        """Array module for the distribution.

        Depending on which of CPU/GPU this distribution is on, this property
        returns :mod:`numpy` or :mod:`cupy`.
        """
        return backend.get_array_module(*self.params.values()) 
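For illustration, a minimal sketch of how this property behaves. It assumes chainer.distributions.Normal, but any distribution whose parameters live on the CPU would resolve the same way:

import numpy as np
from chainer import distributions

# Parameters live on the CPU, so `xp` resolves to numpy;
# with cupy parameters it would resolve to cupy instead.
d = distributions.Normal(
    loc=np.zeros(3, dtype=np.float32),
    scale=np.ones(3, dtype=np.float32))
assert d.xp is np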
Example #15
Source File: uniform.py    From chainer with MIT License
def log_prob(self, x):
        if not isinstance(x, chainer.Variable):
            x = chainer.Variable(x)

        xp = backend.get_array_module(x)

        logp = broadcast.broadcast_to(
            -exponential.log(self.scale), x.shape)
        return where.where(
            utils.force_array(
                (x.data >= self.low.data) & (x.data <= self.high.data)),
            logp,
            xp.array(-xp.inf, logp.dtype)) 
Example #16
Source File: variable_statistics_plot.py    From chainer with MIT License
def collect(self, x, axis):
        out = dict()

        if self.collect_mean:
            out['mean'] = x.mean(axis=axis)

        if self.collect_std:
            out['std'] = x.std(axis=axis)

        if self.percentile_sigmas:
            xp = backend.get_array_module(x)
            p = xp.percentile(x, self.percentile_sigmas, axis=axis)
            out['percentile'] = p

        return out 
Example #17
Source File: variable_statistics_plot.py    From chainer with MIT License
def __call__(self, trainer):
        if self.available():
            # Dynamically import pyplot to call matplotlib.use()
            # after importing chainer.training.extensions
            import matplotlib.pyplot as plt
        else:
            return

        xp = backend.get_array_module(self._vars[0].data)
        stats = xp.zeros(self._data_shape, dtype=xp.float32)
        for i, k in enumerate(self._keys):
            xs = []
            for var in self._vars:
                x = getattr(var, k, None)
                if x is not None:
                    xs.append(x.ravel())
            if xs:
                stat_dict = self._statistician(
                    xp.concatenate(xs, axis=0), axis=0, xp=xp)
                stat_list = []
                if self._plot_mean:
                    stat_list.append(xp.atleast_1d(stat_dict['mean']))
                if self._plot_std:
                    stat_list.append(xp.atleast_1d(stat_dict['std']))
                if self._plot_percentile:
                    stat_list.append(xp.atleast_1d(stat_dict['percentile']))
                stats[i] = xp.concatenate(stat_list, axis=0)

        if xp == cuda.cupy:
            stats = cuda.to_cpu(stats)
        self._samples.add(stats, idx=trainer.updater.iteration)

        if self._trigger(trainer):
            file_path = os.path.join(trainer.out, self._filename)
            self.save_plot_using_module(file_path, plt) 
Example #18
Source File: reporter.py    From chainer with MIT License
def make_statistics(self):
        """Computes and returns the mean and standard deviation values.

        Returns:
            tuple: Mean and standard deviation values.

        """
        x, n = self._x, self._n
        xp = backend.get_array_module(x)
        with chainer.using_device(backend.get_device_from_array(x)):
            mean = x / n
            var = self._x2 / n - mean * mean
            std = xp.sqrt(var)
            return mean, std 
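As a side note, the variance computed above is the running-sums identity Var[x] = E[x^2] - E[x]^2, with _x holding the sum of the observations and _x2 the sum of their squares. A quick numpy sanity check of the identity:

import numpy as np

xs = np.array([1.0, 2.0, 3.0, 4.0])
s, s2, n = xs.sum(), (xs * xs).sum(), len(xs)
mean = s / n                      # 2.5
var = s2 / n - mean * mean        # 7.5 - 6.25 = 1.25
assert np.isclose(var, xs.var())  # matches numpy's population variance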
Example #19
Source File: test_lgamma.py    From chainer with MIT License
def _lgamma_expected(x, dtype):
    if backend.get_array_module(x) is numpy:
        return _lgamma_cpu(x, dtype)
    else:
        return _lgamma_gpu(x, dtype) 
Example #20
Source File: test_erfc.py    From chainer with MIT License
def _erfc_expected(x, dtype):
    if backend.get_array_module(x) is numpy:
        return _erfc_cpu(x, dtype)
    else:
        return _erfc_gpu(x, dtype) 
Example #21
Source File: test_ndtri.py    From chainer with MIT License
def _ndtri_expected(x, dtype):
    if backend.get_array_module(x) is numpy:
        return _ndtri_cpu(x, dtype)
    else:
        return _ndtri_gpu(x, dtype) 
Example #22
Source File: test_erf.py    From chainer with MIT License
def _erf_expected(x, dtype):
    if backend.get_array_module(x) is numpy:
        return _erf_cpu(x, dtype)
    else:
        return _erf_gpu(x, dtype) 
Example #23
Source File: test_dilated_convolution_2d.py    From chainer with MIT License
def check_backward(self, x_data, W_data, b_data, y_grad):
        xp = backend.get_array_module(x_data)
        if not self.c_contiguous:
            x_data = xp.asfortranarray(x_data)
            W_data = xp.asfortranarray(W_data)
            y_grad = xp.asfortranarray(y_grad)
            self.assertFalse(x_data.flags.c_contiguous)
            self.assertFalse(W_data.flags.c_contiguous)
            self.assertFalse(y_grad.flags.c_contiguous)
            if b_data is not None:
                b = xp.empty((len(b_data) * 2,), dtype=self.b.dtype)
                b[::2] = b_data
                b_data = b[::2]
                self.assertFalse(b_data.flags.c_contiguous)

        args = (x_data, W_data)
        if b_data is not None:
            args = args + (b_data,)

        def f(*args):
            return F.dilated_convolution_2d(*args, stride=self.stride,
                                            pad=self.pad, dilate=self.dilate,
                                            cover_all=self.cover_all)

        with chainer.using_config('use_cudnn', self.use_cudnn):
            gradient_check.check_backward(
                f, args, y_grad, dtype=numpy.float64,
                **self.check_backward_options) 
Example #24
Source File: test_shift.py    From chainer with MIT License
def check_backward(self, x_data, y_grad):
        xp = backend.get_array_module(x_data)

        if not self.c_contiguous:
            x_data = xp.asfortranarray(x_data)
            y_grad = xp.asfortranarray(y_grad)
            self.assertFalse(x_data.flags.c_contiguous)
            self.assertFalse(y_grad.flags.c_contiguous)

        gradient_check.check_backward(
            lambda x: shift.shift(x, ksize=self.ksize, dilate=self.dilate),
            x_data, y_grad, dtype='d', atol=5e-4, rtol=5e-3) 
Example #25
Source File: test_swish.py    From chainer with MIT License
def _sigmoid(x):
    xp = backend.get_array_module(x)
    half = x.dtype.type(0.5)
    return xp.tanh(x * half) * half + half 
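This helper (which also appears in the next three examples) computes the logistic sigmoid through the identity sigmoid(x) = tanh(x / 2) / 2 + 1 / 2, which avoids evaluating exp() on large-magnitude inputs. A quick numpy check against the direct formula:

import numpy as np

x = np.linspace(-10, 10, 5).astype(np.float32)
direct = 1.0 / (1.0 + np.exp(-x))
via_tanh = np.tanh(x * 0.5) * 0.5 + 0.5
assert np.allclose(direct, via_tanh, atol=1e-6)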
Example #26
Source File: test_link_peephole.py    From chainer with MIT License
def _sigmoid(x):
    xp = backend.get_array_module(x)
    half = x.dtype.type(0.5)
    return xp.tanh(x * half) * half + half 
Example #27
Source File: test_link_tree_lstm.py    From chainer with MIT License
def _sigmoid(x):
    half = x.dtype.type(0.5)
    xp = backend.get_array_module(x)
    return xp.tanh(x * half) * half + half 
Example #28
Source File: test_link_gru.py    From chainer with MIT License
def _sigmoid(x):
    xp = backend.get_array_module(x)
    half = x.dtype.type(0.5)
    return xp.tanh(x * half) * half + half 
Example #29
Source File: test_bernoulli.py    From chainer with MIT License
def check_prob_binary_check(self, is_gpu):
        smp = self.sample_for_binary_check_test()
        if is_gpu:
            prob = self.gpu_dist.prob(cuda.to_gpu(smp)).data
        else:
            prob = self.cpu_dist.prob(smp).data
        xp = backend.get_array_module(prob)
        if self.binary_check:
            self.assertTrue(xp.all(prob == 0))
        else:
            self.assertTrue(xp.all(prob > 0)) 
Example #30
Source File: test_link_gru.py    From chainer with MIT License
def _gru(func, h, x):
    xp = backend.get_array_module(h, x)

    # r: reset gate, z: update gate (both squashed by the logistic sigmoid).
    r = _sigmoid(x.dot(func.W_r.W.data.T) + h.dot(func.U_r.W.data.T))
    z = _sigmoid(x.dot(func.W_z.W.data.T) + h.dot(func.U_z.W.data.T))
    # Candidate state from the input and the reset-gated hidden state.
    h_bar = xp.tanh(x.dot(func.W.W.data.T) + (r * h).dot(func.U.W.data.T))
    # Output interpolates between the previous state and the candidate.
    y = (1 - z) * h + z * h_bar
    return y