Python chainer.functions.unpooling_2d() Examples

The following are 30 code examples of chainer.functions.unpooling_2d(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module chainer.functions, or try the search function.
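Before the project examples, here is a minimal, self-contained sketch (not taken from any project listed below) of what unpooling_2d does: with ksize equal to stride and cover_all=False, each input element is tiled over a ksize-by-ksize block, which amounts to nearest-neighbour upsampling.

import numpy as np
import chainer.functions as F

# A 1x1x2x2 input upsampled to 1x1x4x4: every element is repeated over a 2x2 block.
x = np.arange(4, dtype=np.float32).reshape(1, 1, 2, 2)
y = F.unpooling_2d(x, ksize=2, stride=2, cover_all=False)
print(y.shape)        # (1, 1, 4, 4)
print(y.array[0, 0])
# [[0. 0. 1. 1.]
#  [0. 0. 1. 1.]
#  [2. 2. 3. 3.]
#  [2. 2. 3. 3.]]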
Example #1
Source File: network.py    From chainer-PGGAN with MIT License
def __call__(self, x, alpha=1.0):
        if self.depth > 0 and alpha < 1.0:
            h = x
            for i in range(self.depth-1):
                h = self['b%d'%i](h)

            h1 = self['b%d'%(self.depth-1)](h)
            h2 = F.unpooling_2d(h1, 2, 2, outsize=self['b%d'%self.depth].outsize)
            h3 = self['b%d'%(self.depth-1)].toRGB(h2)
            h4 = self['b%d'%self.depth](h1, True)
            
            h = h3 * (1 - alpha) + h4 * alpha
        else:
            h = x
            for i in range(self.depth):
                h = self['b%d'%i](h)

            h = self['b%d'%self.depth](h, True)
        
        return h 
Example #2
Source File: fpn.py    From chainer-compiler with MIT License
def forward(self, x):
        hs = self.base(x)

        with flags.for_unroll():
            for i in range(self.n_base_output_minus1, -1, -1):
                hs[i] = self.inner[i](hs[i])
                if i < self.n_base_output_minus1:
                    hs[i] += F.unpooling_2d(hs[i + 1], 2, cover_all=False)

            for i in range(self.n_base_output):
                hs[i] = self.outer[i](hs[i])

            for _ in range(self.scales_minus_n_base_output):
                hs.append(F.max_pooling_2d(hs[-1], 1, stride=2, cover_all=False))

        return hs


# ====================================== 
Example #3
Source File: net.py    From chainer-cyclegan with MIT License
def __call__(self, x, test):
        if self.sample in ("down", "none", "none-9", "none-7", "none-5"):
            h = self.c(x)
        elif self.sample == "up":
            h = F.unpooling_2d(x, 2, 2, 0, cover_all=False)
            h = self.c(h)
        else:
            raise ValueError("unknown sample method %s" % self.sample)
        if self.bn:
            h = self.batchnorm(h, test=test)
        if self.noise:
            h = add_noise(h, test=test)
        if self.dropout:
            h = F.dropout(h, train=not test)
        if self.activation is not None:
            h = self.activation(h)
        return h 
Example #4
Source File: test_unpooling_nd.py    From chainer with MIT License
def check_backward_consistency_regression(self, backend_config):
        # Regression test against the two-dimensional unpooling layer.

        x_data, = self.generate_inputs()
        gy_data = numpy.random.uniform(-1, 1, self.gy_shape).astype(self.dtype)

        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        xp = backend.get_array_module(x_data)

        # Backward computation for N-dimensional unpooling layer.
        x_nd = chainer.Variable(xp.array(x_data))
        y_nd = functions.unpooling_nd(
            x_nd, ksize, stride=stride, pad=pad, cover_all=self.cover_all)
        y_nd.grad = gy_data
        y_nd.backward()

        # Backward computation for two-dimensional unpooling layer.
        x_2d = chainer.Variable(xp.array(x_data))
        y_2d = functions.unpooling_2d(
            x_2d, ksize, stride=stride, pad=pad, cover_all=self.cover_all)
        y_2d.grad = gy_data
        y_2d.backward()

        # Test that the two result gradients are close enough.
        opt = self.check_backward_options
        testing.assert_allclose(
            x_nd.grad, x_2d.grad, atol=opt['atol'], rtol=opt['rtol']) 
Example #5
Source File: net.py    From PredNet with Apache License 2.0
def __call__(self, x):
        for nth in range(self.layers):
            if getattr(self, 'P' + str(nth)) is None:
                setattr(self, 'P' + str(nth), variable.Variable(
                    self.xp.zeros(self.sizes[nth], dtype=x.data.dtype),
                    volatile='auto'))

        E = [None] * self.layers
        for nth in range(self.layers):
            if nth == 0:
                E[nth] = F.concat((F.relu(x - getattr(self, 'P' + str(nth))),
                                  F.relu(getattr(self, 'P' + str(nth)) - x)))
            else:
                A = F.max_pooling_2d(F.relu(getattr(self, 'ConvA' + str(nth))(E[nth - 1])), 2, stride = 2)
                E[nth] = F.concat((F.relu(A - getattr(self, 'P' + str(nth))),
                                  F.relu(getattr(self, 'P' + str(nth)) - A)))

        R = [None] * self.layers
        for nth in reversed(range(self.layers)):
            if nth == self.layers - 1:
                R[nth] = getattr(self, 'ConvLSTM' + str(nth))((E[nth],))
            else:
                upR = F.unpooling_2d(R[nth + 1], 2, stride = 2, cover_all=False)
                R[nth] = getattr(self, 'ConvLSTM' + str(nth))((E[nth], upR))

            if nth == 0:
                setattr(self, 'P' + str(nth), F.clipped_relu(getattr(self, 'ConvP' + str(nth))(R[nth]), 1.0))
            else:
                setattr(self, 'P' + str(nth), F.relu(getattr(self, 'ConvP' + str(nth))(R[nth])))
        
        return self.P0 
Example #6
Source File: test_unpooling_2d.py    From chainer with MIT License
def check_forward(self, x_data):
        x = chainer.Variable(x_data)
        y = functions.unpooling_2d(x, self.ksize, outsize=self.outsize,
                                   cover_all=self.cover_all)
        self.assertEqual(y.data.dtype, self.dtype)
        y_data = cuda.to_cpu(y.data)

        self.assertEqual(self.gy.shape, y_data.shape)
        for i in six.moves.range(self.N):
            for c in six.moves.range(self.n_channels):
                outsize = self.outsize or self.expected_outsize
                assert y_data.shape[2:] == outsize
                if outsize == (5, 2):
                    expect = numpy.zeros(outsize, dtype=self.dtype)
                    expect[:2, :] = self.x[i, c, 0, 0]
                    expect[2:4, :] = self.x[i, c, 1, 0]
                elif outsize == (4, 2):
                    expect = numpy.array([
                        [self.x[i, c, 0, 0], self.x[i, c, 0, 0]],
                        [self.x[i, c, 0, 0], self.x[i, c, 0, 0]],
                        [self.x[i, c, 1, 0], self.x[i, c, 1, 0]],
                        [self.x[i, c, 1, 0], self.x[i, c, 1, 0]],
                    ])
                elif outsize == (3, 1):
                    expect = numpy.array([
                        [self.x[i, c, 0, 0]],
                        [self.x[i, c, 0, 0]],
                        [self.x[i, c, 1, 0]],
                    ])
                else:
                    raise ValueError('Unsupported outsize: {}'.format(outsize))
                testing.assert_allclose(expect, y_data[i, c]) 
Example #7
Source File: test_unpooling_2d.py    From chainer with MIT License
def check_backward(self, x_data, y_grad):
        def f(x):
            return functions.unpooling_2d(x, self.ksize, outsize=self.outsize,
                                          cover_all=self.cover_all)
        gradient_check.check_backward(
            f, x_data, y_grad, dtype=numpy.float64,
            **self.check_backward_options) 
Example #8
Source File: test_unpooling_2d.py    From chainer with MIT License
def check_double_backward(self, x_data, y_grad, x_grad_grad,
                              use_cudnn='always'):
        def f(x):
            return functions.unpooling_2d(x, self.ksize, outsize=self.outsize,
                                          cover_all=self.cover_all)
        with chainer.using_config('use_cudnn', use_cudnn):
            gradient_check.check_double_backward(
                f, x_data, y_grad, x_grad_grad, dtype=numpy.float64,
                **self.check_double_backward_options) 
Example #9
Source File: test_unpooling_2d.py    From chainer with MIT License
def check_backward(self, x_data, y_grad):
        def f(x):
            return functions.unpooling_2d(x, self.ksize, outsize=self.outsize,
                                          pad=self.pad)
        gradient_check.check_backward(
            f, x_data, y_grad, dtype=numpy.float64,
            **self.check_backward_options) 
Example #10
Source File: test_unpooling_2d.py    From chainer with MIT License
def check_double_backward(self, x_data, y_grad, x_grad_grad,
                              use_cudnn='always'):
        def f(x):
            return functions.unpooling_2d(x, self.ksize, outsize=self.outsize,
                                          pad=self.pad)
        with chainer.using_config('use_cudnn', use_cudnn):
            gradient_check.check_double_backward(
                f, x_data, y_grad, x_grad_grad, dtype=numpy.float64,
                **self.check_double_backward_options) 
Example #11
Source File: test_unpooling_2d.py    From chainer with MIT License
def check_left_inverse(self, xp, use_cudnn='never'):
        x = xp.arange(self.h * self.h).reshape(
            (1, 1, self.h, self.h)).astype(self.dtype)
        with chainer.using_config('use_cudnn', use_cudnn):
            y = chainer.functions.unpooling_2d(
                x, self.k, self.s, self.p, None, self.cover_all)
            x_ = chainer.functions.max_pooling_2d(
                y, self.k, self.s, self.p, self.cover_all).data

        self.assertEqual(x.shape, x_.shape)
        self.assertEqual(x.dtype, x_.dtype)
        chainer.testing.assert_allclose(x, x_) 
Example #12
Source File: test_unpooling_2d.py    From chainer with MIT License
def check_left_inverse(self, xp, use_cudnn='never'):
        x = xp.arange(self.h * self.h).reshape(
            (1, 1, self.h, self.h)).astype(self.dtype)
        with chainer.using_config('use_cudnn', use_cudnn):
            # average_pooling_2d does not have the cover_all option
            # that max_pooling_2d has.
            y = chainer.functions.unpooling_2d(
                x, self.k, self.s, self.p, None, False)
            x_ = chainer.functions.average_pooling_2d(
                y, self.k, self.s, self.p).data

        self.assertEqual(x.shape, x_.shape)
        self.assertEqual(x.dtype, x_.dtype)
        chainer.testing.assert_allclose(x, x_) 
Example #13
Source File: yolo_v3.py    From chainercv with MIT License
def _upsample(x):
    return F.unpooling_2d(x, 2, cover_all=False) 
Example #14
Source File: fpn.py    From chainercv with MIT License
def forward(self, x):
        hs = list(self.base(x))

        for i in reversed(range(len(hs))):
            hs[i] = self.inner[i](hs[i])
            if i + 1 < len(hs):
                hs[i] += F.unpooling_2d(hs[i + 1], 2, cover_all=False)

        for i in range(len(hs)):
            hs[i] = self.outer[i](hs[i])

        while len(hs) < len(self.scales):
            hs.append(F.max_pooling_2d(hs[-1], 1, stride=2, cover_all=False))

        return hs 
Example #15
Source File: net.py    From chainer-gan-lib with MIT License
def __call__(self, x):
        h = F.unpooling_2d(x, 2, 2, 0, outsize=(x.shape[2]*2, x.shape[3]*2))
        h = F.leaky_relu(feature_vector_normalization(self.c0(h)))
        h = F.leaky_relu(feature_vector_normalization(self.c1(h)))
        return h 
Example #16
Source File: net.py    From chainer-gan-lib with MIT License
def __call__(self, z, stage):
        # stage0: c0->c1->out0
        # stage1: c0->c1-> (1-a)*(up->out0) + (a)*(b1->out1)
        # stage2: c0->c1->b1->out1
        # stage3: c0->c1->b1-> (1-a)*(up->out1) + (a)*(b2->out2)
        # stage4: c0->c1->b2->out2
        # ...

        stage = min(stage, self.max_stage)
        alpha = stage - math.floor(stage)
        stage = math.floor(stage)

        h = F.reshape(z,(len(z), self.n_hidden, 1, 1))
        h = F.leaky_relu(feature_vector_normalization(self.c0(h)))
        h = F.leaky_relu(feature_vector_normalization(self.c1(h)))

        for i in range(1, int(stage//2+1)):
            h = getattr(self, "b%d"%i)(h)

        if int(stage)%2==0:
            out = getattr(self, "out%d"%(stage//2))
            x = out(h)
        else:
            out_prev = getattr(self, "out%d"%(stage//2))
            out_curr = getattr(self, "out%d"%(stage//2+1))
            b_curr = getattr(self, "b%d"%(stage//2+1))

            x_0 = out_prev(F.unpooling_2d(h, 2, 2, 0, outsize=(2*h.shape[2], 2*h.shape[3])))
            x_1 = out_curr(b_curr(h))
            x = (1.0-alpha)*x_0 + alpha*x_1

        if chainer.configuration.config.train:
            return x
        else:
            scale = int(32 // x.data.shape[2])
            return F.unpooling_2d(x, scale, scale, 0, outsize=(32,32)) 
Example #17
Source File: net.py    From chainer-gan-lib with MIT License
def __call__(self, x):
        h = self.c0(F.unpooling_2d(F.relu(self.bn0(x)), 2, 2, 0, cover_all=False))
        h = self.c1(F.relu(self.bn1(h)))
        hs = self.cs(F.unpooling_2d(x, 2, 2, 0, cover_all=False))
        return h + hs 
Example #18
Source File: ops.py    From chainer-gan-experiments with MIT License
def _do_before_cal(self, x):
        if self.nn == 'up_unpooling':
            x = F.unpooling_2d(x, 2, 2, 0, cover_all=False)
        return x 
Example #19
Source File: net.py    From pixcaler with MIT License
def __call__(self, x):
        return self.conv(F.unpooling_2d(x, self.r, self.r, 0, cover_all=False))

# https://gist.github.com/musyoku/849094afca2889d9024f59e683fa7036 
Example #20
Source File: block.py    From Deep_VoiceChanger with MIT License
def _upsample(x):
    h, w = x.shape[2:]
    return F.unpooling_2d(x, 2, outsize=(h * 2, w * 2)) 
Example #21
Source File: Unpooling2D.py    From chainer-compiler with MIT License
def forward(self, x):
        y = F.unpooling_2d(x, 2, cover_all=False)
        return y 
Example #22
Source File: block.py    From Deep_VoiceChanger with MIT License
def _upsample_frq(x):
    h, w = x.shape[2:]
    return F.unpooling_2d(x, (1,2), outsize=(h, w * 2)) 
Example #23
Source File: block_1d.py    From Deep_VoiceChanger with MIT License
def _upsample(x):
    h, w = x.shape[2:]
    return F.unpooling_2d(x, 2, outsize=(h * 2, w * 2)) 
Example #24
Source File: block_1d.py    From Deep_VoiceChanger with MIT License
def _upsample_frq(x):
    h, w = x.shape[2:]
    return F.unpooling_2d(x, (1,2), outsize=(h, w * 2)) 
Example #25
Source File: rescale.py    From chainer-stylegan with MIT License
def upscale2x(h):
    return F.unpooling_2d(h, 2, 2, 0, outsize=(h.shape[2] * 2, h.shape[3] * 2)) 
Example #26
Source File: Unpooling2D.py    From chainer-compiler with MIT License
def forward(self, x):
        y = F.unpooling_2d(x, 2, cover_all=False)
        return y 
Example #27
Source File: Unpooling2D.py    From chainer-compiler with MIT License
def forward(self, x):
        y = F.unpooling_2d(x, (3, 4), cover_all=False)
        return y


# ====================================== 
Example #28
Source File: test_unpooling_nd.py    From chainer with MIT License
def check_forward_consistency_regression(self, backend_config):
        # Regression test against the two-dimensional unpooling layer.
        inputs, = self.generate_inputs()
        x = chainer.Variable(backend_config.get_array(inputs))

        ksize = self.ksize
        stride = self.stride
        pad = self.pad

        y_nd = functions.unpooling_nd(x, ksize, stride=stride, pad=pad,
                                      cover_all=self.cover_all)
        y_2d = functions.unpooling_2d(x, ksize, stride=stride, pad=pad,
                                      cover_all=self.cover_all)
        testing.assert_allclose(
            y_nd.array, y_2d.array, **self.check_forward_options) 
Example #29
Source File: Unpooling2D.py    From chainer-compiler with MIT License
def forward(self, x):
        y = F.unpooling_2d(x, (3, 4), cover_all=False)
        return y


# ====================================== 
Example #30
Source File: net_pre-trained.py    From chainer-partial_convolution_image_inpainting with MIT License
def __call__(self, x, mask):
        #h = self.c(x) - self.b
        self.m.W.data = self.xp.array(self.maskW)  # weights of the mask convolution are set to 1
        h = self.c(x*mask) #(B,C,H,W)
        B,C,H,W = h.shape
        #b = F.transpose(F.broadcast_to(self.c.b,(B,H,W,C)),(0,3,1,2))
        #h = h - b
        mask_sums = self.m(mask)
        mask_new = (self.xp.sign(mask_sums.data-0.5)+1.0)*0.5
        mask_new_b = mask_new.astype("bool")
        
        mask_sums = F.where(mask_new_b,mask_sums,0.01*Variable(self.xp.ones(mask_sums.shape).astype("f")))
        h = h/mask_sums 
        #h = h/mask_sums + b
         
        mask_new = Variable(mask_new)
        h = F.where(mask_new_b, h, Variable(self.xp.zeros(h.shape).astype("f"))) 

        #elif self.sample=="up":
        #    h = F.unpooling_2d(x, 2, 2, 0, cover_all=False)
        #    h = self.c(h)
        #else:
        #    print("unknown sample method %s"%self.sample)
        if self.bn:
            h = self.batchnorm(h)
        if self.noise:
            h = add_noise(h)
        if self.dropout:
            h = F.dropout(h)
        if self.activation is not None:
            h = self.activation(h)
        return h, mask_new