Python chainer.functions.fixed_batch_normalization() Examples
The following are 18 code examples of chainer.functions.fixed_batch_normalization(). The original project and source file for each example are noted in the header above it. You may also want to check out the other functions and classes available in the chainer.functions module.
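
Before the examples, here is a minimal, self-contained sketch of a typical call. fixed_batch_normalization(x, gamma, beta, mean, var) normalizes x with externally supplied (fixed) statistics, as used at inference time, rather than statistics computed from the current batch. The toy values below are illustrative only and are not taken from any of the examples.

import numpy as np
import chainer.functions as F

# Toy input: a batch of 4 samples with 3 features each.
x = np.random.randn(4, 3).astype(np.float32)

# Fixed statistics, e.g. running averages accumulated during training.
mean = x.mean(axis=0)
var = x.var(axis=0)

# Identity affine transform: scale by 1, shift by 0.
gamma = np.ones_like(mean)
beta = np.zeros_like(mean)

# eps=2e-5 is Chainer's documented default.
y = F.fixed_batch_normalization(x, gamma, beta, mean, var, eps=2e-5)
print(y.shape)  # (4, 3)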
Example #1
Source File: test_batch_normalization.py From chainer with MIT License
def check_backward(self, inputs, grad_outputs, backend_config):
    inputs = backend_config.get_array(inputs)
    grad_outputs = backend_config.get_array(grad_outputs)

    if not self.c_contiguous:
        with backend_config:
            inputs = _as_noncontiguous_array(inputs)
            grad_outputs = _as_noncontiguous_array(grad_outputs)

    def f(*inputs):
        y = functions.fixed_batch_normalization(*inputs, eps=self.eps)
        return y,

    with backend_config:
        gradient_check.check_backward(
            f, inputs, grad_outputs, **self.check_backward_options)
Example #2
Source File: test_batch_normalization.py From chainer with MIT License
def check_double_backward(
        self, inputs, grad_outputs, grad_grad_inputs, backend_config):
    inputs = backend_config.get_array(inputs)
    grad_outputs = backend_config.get_array(grad_outputs)
    grad_grad_inputs = backend_config.get_array(grad_grad_inputs)

    if not self.c_contiguous:
        with backend_config:
            inputs = _as_noncontiguous_array(inputs)
            grad_outputs = _as_noncontiguous_array(grad_outputs)
            grad_grad_inputs = _as_noncontiguous_array(grad_grad_inputs)

    def f(*inputs):
        return functions.fixed_batch_normalization(*inputs, eps=self.eps)

    with backend_config:
        gradient_check.check_double_backward(
            f, inputs, grad_outputs, grad_grad_inputs,
            **self.check_double_backward_options)
Example #3
Source File: test_normalizations.py From onnx-chainer with MIT License
def setUp(self):

    class Model(chainer.Chain):

        def __call__(self, x):
            mean = x.array.mean(axis=0)
            var = x.array.var(axis=0)
            gamma = np.ones_like(mean, dtype=x.dtype)
            beta = np.zeros_like(mean, dtype=x.dtype)
            return F.fixed_batch_normalization(x, gamma, beta, mean, var)

    self.model = Model()
    self.x = input_generator.increasing(2, 5)
Example #4
Source File: gpu_test_batchnorm.py From deep-learning-from-scratch-3 with MIT License
def test_forward4(self):
    N, C, H, W = 20, 10, 5, 5
    x, gamma, beta, mean, var = get_params(N, C, H, W)
    # Reference output from Chainer's implementation.
    cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
    with dezero.test_mode():
        # `batch_nrom` (sic) is the spelling used in the dezero source.
        y = F.batch_nrom(x, gamma, beta, mean, var)
    self.assertTrue(array_allclose(y.data, cy.data))
Example #5
Source File: gpu_test_batchnorm.py From deep-learning-from-scratch-3 with MIT License
def test_forward3(self):
    N, C = 20, 10
    x, gamma, beta, mean, var = get_params(N, C)
    cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
    with dezero.test_mode():
        y = F.batch_nrom(x, gamma, beta, mean, var)
    self.assertTrue(array_allclose(y.data, cy.data))
Example #6
Source File: gpu_test_batchnorm.py From deep-learning-from-scratch-3 with MIT License
def test_forward1(self):
    N, C = 8, 1
    x, gamma, beta, mean, var = get_params(N, C)
    cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
    with dezero.test_mode():
        y = F.batch_nrom(x, gamma, beta, mean, var)
    self.assertTrue(array_allclose(y.data, cy.data))
Example #7
Source File: test_batchnorm.py From deep-learning-from-scratch-3 with MIT License
def test_forward4(self):
    N, C, H, W = 20, 10, 5, 5
    x, gamma, beta, mean, var = get_params(N, C, H, W)
    cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
    with dezero.test_mode():
        y = F.batch_nrom(x, gamma, beta, mean, var)
    self.assertTrue(array_allclose(y.data, cy.data))
Example #8
Source File: test_batchnorm.py From deep-learning-from-scratch-3 with MIT License
def test_forward3(self):
    N, C = 20, 10
    x, gamma, beta, mean, var = get_params(N, C)
    cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
    with dezero.test_mode():
        y = F.batch_nrom(x, gamma, beta, mean, var)
    self.assertTrue(array_allclose(y.data, cy.data))
Example #9
Source File: test_batchnorm.py From deep-learning-from-scratch-3 with MIT License
def test_forward2(self):
    N, C = 1, 10
    x, gamma, beta, mean, var = get_params(N, C)
    cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
    with dezero.test_mode():
        y = F.batch_nrom(x, gamma, beta, mean, var)
    self.assertTrue(array_allclose(y.data, cy.data))
Example #10
Source File: test_batchnorm.py From deep-learning-from-scratch-3 with MIT License
def test_forward1(self):
    N, C = 8, 1
    x, gamma, beta, mean, var = get_params(N, C)
    cy = CF.fixed_batch_normalization(x, gamma, beta, mean, var)
    with dezero.test_mode():
        y = F.batch_nrom(x, gamma, beta, mean, var)
    self.assertTrue(array_allclose(y.data, cy.data))
Example #11
Source File: test_normalizations.py From onnx-chainer with MIT License
def setUp(self):

    class Model(chainer.Chain):

        def __call__(self, x, gamma, beta, mean, var):
            return F.fixed_batch_normalization(x, gamma, beta, mean, var)

    self.model = Model()
    self.x = input_generator.increasing(2, 5)
    self.mean = self.x.mean(axis=0)
    self.var = self.x.var(axis=0)
    self.gamma = np.ones_like(self.mean, dtype=self.x.dtype)
    self.beta = np.zeros_like(self.mean, dtype=self.x.dtype)
Example #12
Source File: gen_extra_test.py From chainer-compiler with MIT License
def gen_convtranspose_bn(test_name):
    gb = onnx_script.GraphBuilder(test_name)
    bsize = 2
    ichan = 3
    ochan = 4
    ksize = 3
    isize = 7
    x = aranges(bsize, ochan, isize, isize)
    w = aranges(ochan, ichan, ksize, ksize) * 0.01
    scale = aranges(ichan) * 0.1 + 1
    bias = aranges(ichan) * 0.1 + 2
    mean = aranges(ichan) * 0.1 + 3
    var = aranges(ichan) * 0.1 + 4

    # Expected output computed with Chainer.
    conv = F.deconvolution_2d(x, w, pad=1, outsize=(isize, isize))
    y = F.fixed_batch_normalization(conv, scale, bias, mean, var)

    # Equivalent ONNX graph: ConvTranspose followed by BatchNormalization.
    x_v = gb.input('x', x)
    w_v = gb.param('w', w)
    scale_v = gb.param('scale', scale)
    bias_v = gb.param('bias', bias)
    mean_v = gb.param('mean', mean)
    var_v = gb.param('var', var)
    conv_v = gb.ConvTranspose([x_v, w_v], kernel_shape=[ksize, ksize],
                              pads=[1, 1, 1, 1], output_shape=[isize, isize])
    y_v = gb.BatchNormalization([conv_v, scale_v, bias_v, mean_v, var_v])
    gb.output(y_v, y)
    gb.gen_test()
Example #13
Source File: test_normalizations.py From chainer with MIT License
def setUp(self):

    class Model(chainer.Chain):

        def __call__(self, x, gamma, beta, mean, var):
            return F.fixed_batch_normalization(x, gamma, beta, mean, var)

    self.model = Model()
    self.x = input_generator.increasing(2, 5)
    self.mean = self.x.mean(axis=0)
    self.var = self.x.var(axis=0)
    self.gamma = np.ones_like(self.mean, dtype=self.x.dtype)
    self.beta = np.zeros_like(self.mean, dtype=self.x.dtype)
Example #14
Source File: test_normalizations.py From chainer with MIT License
def setUp(self):

    class Model(chainer.Chain):

        def __call__(self, x):
            mean = x.array.mean(axis=0)
            var = x.array.var(axis=0)
            gamma = np.ones_like(mean, dtype=x.dtype)
            beta = np.zeros_like(mean, dtype=x.dtype)
            return F.fixed_batch_normalization(x, gamma, beta, mean, var)

    self.model = Model()
    self.x = input_generator.increasing(2, 5)
Example #15
Source File: test_batch_normalization.py From chainer with MIT License
def test_valid(self):
    functions.fixed_batch_normalization(*self.args, eps=1e-5)
Example #16
Source File: test_batch_normalization.py From chainer with MIT License
def check_forward(self, inputs, enable_backprop, backend_config):
    y_expected, = self.forward_cpu(inputs)

    inputs = backend_config.get_array(inputs)
    if not self.c_contiguous:
        with backend_config:
            inputs = _as_noncontiguous_array(inputs)

    with chainer.using_config('enable_backprop', enable_backprop):
        with backend_config:
            y = functions.fixed_batch_normalization(*inputs, eps=self.eps)
    assert y.data.dtype == self.dtype

    testing.assert_allclose(
        y_expected, y.data, **self.check_forward_options)
Example #17
Source File: test_batch_normalization.py From chainer with MIT License
def _as_noncontiguous_array(array):
    # TODO(niboshi): cupy + cudnn test fails in F.fixed_batch_normalization.
    # Fix it and use testing.array._as_noncontiguous_array.
    def as_noncontiguous_array(arr):
        if arr is None:
            return None
        if isinstance(arr, (numpy.ndarray, cuda.ndarray)):
            xp = chainer.backend.get_array_module(arr)
            return xp.asfortranarray(arr)
        return testing.array._as_noncontiguous_array(arr)

    if isinstance(array, (list, tuple)):
        return type(array)([as_noncontiguous_array(arr) for arr in array])
    return as_noncontiguous_array(array)
Example #18
Source File: misc.py From vat_chainer with MIT License
def call_bn(bn, x, test=False, update_batch_stats=True):
    if test:
        # Evaluation: normalize with the link's running (fixed) statistics.
        # Note: `use_cudnn` is an argument from older Chainer versions.
        return F.fixed_batch_normalization(x, bn.gamma, bn.beta,
                                           bn.avg_mean, bn.avg_var,
                                           use_cudnn=False)
    elif not update_batch_stats:
        # Normalize with batch statistics, without updating running averages.
        return F.batch_normalization(x, bn.gamma, bn.beta, use_cudnn=False)
    else:
        # Default training path: the link updates its running statistics.
        return bn(x)
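
For context, here is a sketch of how this helper might be driven. The BatchNormalization link construction and the input below are assumptions for illustration, not part of the vat_chainer source, and the snippet presumes an older Chainer version in which use_cudnn is still a valid argument.

import numpy as np
import chainer.links as L

bn = L.BatchNormalization(3)  # hypothetical link over 3 features
x = np.random.randn(4, 3).astype(np.float32)

y_train = call_bn(bn, x)                                   # updates running stats
y_frozen = call_bn(bn, x, update_batch_stats=False)        # batch stats, no update
y_eval = call_bn(bn, x, test=True)                         # fixed running stats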