Python chainer.gradient_check.check_backward() Examples
The following are 30 code examples of chainer.gradient_check.check_backward().
You may also want to check out all available functions and classes of the chainer.gradient_check module.
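For orientation, here is a minimal, self-contained sketch of how gradient_check.check_backward() is typically called; the choice of F.sigmoid, the array shapes, and the tolerances are illustrative assumptions, not taken from any of the test files listed below.

import numpy as np
import chainer.functions as F
from chainer import gradient_check

# Illustrative data: a small float32 input and an upstream gradient with the
# same shape as the output (sigmoid is elementwise, so shapes match).
x = np.random.uniform(-1, 1, (3, 4)).astype(np.float32)
gy = np.random.uniform(-1, 1, (3, 4)).astype(np.float32)

# check_backward runs the function forward, backpropagates gy, and compares
# the resulting gradients against numerical gradients computed with step eps.
# It raises an error if the analytical and numerical gradients disagree.
gradient_check.check_backward(F.sigmoid, x, gy, eps=1e-3, atol=1e-4, rtol=1e-4)

The test snippets that follow use the same pattern: either they call gradient_check.check_backward() directly on a link or a wrapper function, or they delegate to a check_backward() helper defined in the same test class.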
Example #1
Source File: test_swish.py From chainer with MIT License | 5 votes |
def test_backward_cpu(self):
    self.check_backward(self.x, self.gy)
Example #2
Source File: test_link_n_step_gru.py From chainer with MIT License | 5 votes |
def test_backward_cpu(self):
    self.check_backward(self.h, self.xs, self.gh, self.gys)
Example #3
Source File: test_link_tree_lstm.py From chainer with MIT License | 5 votes |
def test_full_backward_cpu(self):
    self.check_backward(self.gc, self.gh, *self.inputs)
Example #4
Source File: test_link_n_step_gru.py From chainer with MIT License | 5 votes |
def test_backward_cpu(self):
    self.check_backward(
        self.h, self.xs, self.gh, self.gys)
Example #5
Source File: test_link_n_step_gru.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.rnn.to_gpu()
    with chainer.using_config('use_cudnn', 'auto'):
        self.check_backward(
            cuda.to_gpu(self.h),
            [cuda.to_gpu(x) for x in self.xs],
            cuda.to_gpu(self.gh),
            [cuda.to_gpu(gy) for gy in self.gys])
Example #6
Source File: test_link_n_step_gru.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.rnn.to_gpu()
    with chainer.using_config('use_cudnn', 'auto'):
        self.check_backward(
            cuda.to_gpu(self.h),
            [cuda.to_gpu(x) for x in self.xs],
            cuda.to_gpu(self.gh),
            [cuda.to_gpu(gy) for gy in self.gys])
Example #7
Source File: test_link_tree_lstm.py From chainer with MIT License | 5 votes |
def test_no_gh_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(
        cuda.to_gpu(self.gc), None,
        *[cuda.to_gpu(v) for v in self.inputs])
Example #8
Source File: test_link_tree_lstm.py From chainer with MIT License | 5 votes |
def test_no_gc_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(
        None, cuda.to_gpu(self.gh),
        *[cuda.to_gpu(v) for v in self.inputs])
Example #9
Source File: test_link_tree_lstm.py From chainer with MIT License | 5 votes |
def test_no_gh_backward_cpu(self):
    self.check_backward(self.gc, None, *self.inputs)
Example #10
Source File: test_link_tree_lstm.py From chainer with MIT License | 5 votes |
def test_no_gc_backward_cpu(self):
    self.check_backward(None, self.gh, *self.inputs)
Example #11
Source File: test_link_n_step_gru.py From chainer with MIT License | 5 votes |
def check_backward(self, h_data, xs_data, gh_data, gys_data):
    # Wrap the RNN so check_backward can drive it with a flat tuple of
    # inputs and receive a flat tuple of outputs.
    def fun(*args):
        if self.hidden_none:
            h = None
            xs = args
        else:
            h, = args[:1]
            xs = args[1:]
        hy, ys = self.rnn(h, xs)
        return tuple([hy, ] + list(ys))

    # Collect the parameters of every layer so their gradients are checked too.
    params = []
    for layer in self.rnn:
        for p in layer.params():
            params.append(p)

    if self.hidden_none:
        in_data = xs_data
    else:
        in_data = [h_data] + xs_data
    gradient_check.check_backward(
        fun, tuple(in_data), tuple([gh_data] + gys_data),
        tuple(params), eps=1e-2, rtol=1e-3, atol=1e-3)
Example #12
Source File: test_link_n_step_lstm.py From chainer with MIT License | 5 votes |
def check_backward(self, h_data, c_data, xs_data, gh_data, gc_data, gys_data):
    # Wrap the LSTM so check_backward can drive it with a flat tuple of
    # inputs and receive a flat tuple of outputs.
    def fun(*args):
        if self.hidden_none:
            h = c = None
            xs = args
        else:
            h, c = args[:2]
            xs = args[2:]
        hy, cy, ys = self.rnn(h, c, xs)
        return tuple([hy, cy] + list(ys))

    # Collect the parameters of every layer so their gradients are checked too.
    params = []
    for layer in self.rnn:
        for p in layer.params():
            params.append(p)

    if self.hidden_none:
        in_data = xs_data
    else:
        in_data = [h_data, c_data] + xs_data
    gradient_check.check_backward(
        fun, tuple(in_data), tuple([gh_data, gc_data] + gys_data),
        tuple(params), eps=1e-2, rtol=1e-3, atol=1e-3)
Example #13
Source File: test_prelu.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
Example #14
Source File: test_prelu.py From chainer with MIT License | 5 votes |
def check_backward(self, x_data, y_grad):
    gradient_check.check_backward(
        self.link, x_data, y_grad, self.link.W, atol=1e-4)
Example #15
Source File: test_swish.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
Example #16
Source File: test_link_n_step_lstm.py From chainer with MIT License | 5 votes |
def test_backward_cpu(self):
    self.check_backward(
        self.h, self.c, self.xs, self.gh, self.gc, self.gys)
Example #17
Source File: test_swish.py From chainer with MIT License | 5 votes |
def check_backward(self, x_data, gy_data):
    gradient_check.check_backward(
        self.link, x_data, gy_data, self.link.beta, atol=1e-4)
Example #18
Source File: test_simplified_dropconnect.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy),
                        cuda.to_gpu(self.mask))
Example #19
Source File: test_simplified_dropconnect.py From chainer with MIT License | 5 votes |
def check_backward(self, x_data, y_grad, mask):
    gradient_check.check_backward(
        self.link_wrapper, (x_data, mask), y_grad,
        (self.link.W, self.link.b), dtype='d',
        atol=1e-4, rtol=1e-3)
Example #20
Source File: test_simplified_dropconnect.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy),
                        cuda.to_gpu(self.mask))
Example #21
Source File: test_simplified_dropconnect.py From chainer with MIT License | 5 votes |
def test_backward_cpu(self):
    self.check_backward(self.x, self.gy, self.mask)
Example #22
Source File: test_simplified_dropconnect.py From chainer with MIT License | 5 votes |
def check_backward(self, x_data, y_grad, mask):
    gradient_check.check_backward(
        self.link_wrapper, (x_data, mask), y_grad,
        (self.link.W, self.link.b), dtype='d',
        **self.check_backward_options)
Example #23
Source File: test_maxout.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
Example #24
Source File: test_maxout.py From chainer with MIT License | 5 votes |
def check_backward(self, x_data, y_grad):
    params = [self.link.linear.W]
    if self.initial_bias is not None:
        params.append(self.link.linear.b)
    gradient_check.check_backward(
        self.link, x_data, y_grad, params, atol=1e-2)
Example #25
Source File: test_highway.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
Example #26
Source File: test_highway.py From chainer with MIT License | 5 votes |
def test_backward_cpu(self):
    self.check_backward(self.x, self.gy)
Example #27
Source File: test_highway.py From chainer with MIT License | 5 votes |
def check_backward(self, x_data, y_grad):
    gradient_check.check_backward(
        self.link, x_data, y_grad,
        (self.link.plain.W, self.link.plain.b,
         self.link.transform.W, self.link.transform.b),
        eps=1e-2, atol=3.2e-3, rtol=1e-2)
Example #28
Source File: test_scale.py From chainer with MIT License | 5 votes |
def test_backward_gpu(self):
    with testing.assert_warns(DeprecationWarning):
        self.link.to_gpu()
    x = cuda.to_gpu(self.x)
    if self.learn_W:
        W = None
    else:
        W = cuda.to_gpu(self.W)
    gy = cuda.to_gpu(self.gy)
    self.check_backward(x, W, gy)
Example #29
Source File: test_scale.py From chainer with MIT License | 5 votes |
def check_backward(self, x_data, W_data, y_grad):
    if W_data is None:
        params = [self.link.W]
        gradient_check.check_backward(
            self.link, x_data, y_grad, params, atol=1e-2)
    else:
        gradient_check.check_backward(
            self.link, (x_data, W_data), y_grad, atol=1e-2)
Example #30
Source File: test_bilinear.py From chainer with MIT License | 5 votes |
def _check_backward(e1, e2, y_grad, link, bias):
    params = [link.W]
    if bias:
        params.append(link.b)
    gradient_check.check_backward(
        link, (e1, e2), y_grad, params, eps=1e-2, rtol=1e-3)