Python chainer.functions.transpose_sequence() Examples

The following are 9 code examples of chainer.functions.transpose_sequence(), drawn from open-source projects. You may also want to check out the other available functions and classes of the module chainer.functions.
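As a quick orientation before the examples: transpose_sequence() takes a list of variable-length sequences, one array per example with lengths in descending order, and returns a time-major list with one array per time step. A minimal sketch of that behavior:

import numpy as np
import chainer.functions as F

# Three sequences with lengths 3, 2, 1 (descending order).
xs = [np.array([1, 2, 3]), np.array([4, 5]), np.array([6])]

ys = F.transpose_sequence(xs)
# ys[0].data -> [1, 4, 6]  (first element of every sequence)
# ys[1].data -> [2, 5]     (second element of the two longer sequences)
# ys[2].data -> [3]        (third element of the longest sequence)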
Example #1
Source File: MyLSTM.py    From chainer-compiler with MIT License
def run_with_n_step_lstm(xs, h, c, w, b):
    # Transpose the batch of sequences into time-major form.
    xs = F.transpose_sequence(xs)
    print(w.shape)
    # Split the fused weight into its input-to-hidden and hidden-to-hidden
    # halves, then each half into the four gate matrices n_step_lstm expects.
    wx, wh = F.split_axis(w, 2, 1)
    ws = F.split_axis(wx, 4, 0) + F.split_axis(wh, 4, 0)
    # n_step_lstm adds an input bias and a recurrent bias per gate, so halve
    # the fused bias and pass the same four pieces twice.
    b = b / 2
    bs = F.split_axis(b, 4, 0) * 2
    print(bs)
    h, _, _ = F.n_step_lstm(1, 0.0, h, c, ws, bs, xs)
    return h
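The fused weight layout assumed above can be puzzling, so here is a sketch of just the splitting logic in plain NumPy. The shapes are inferred from the splits in the example (the even split along axis 1 only works when the input size equals the hidden size), and all names are illustrative:

import numpy as np

n_units = 4  # assumed hidden size; the input size must match for the even split
# Fused weight: the four gate matrices stacked along axis 0, with the
# input-to-hidden and hidden-to-hidden halves side by side along axis 1.
w = np.random.randn(4 * n_units, 2 * n_units).astype(np.float32)
wx, wh = np.split(w, 2, axis=1)                          # each (4*n_units, n_units)
ws = np.split(wx, 4, axis=0) + np.split(wh, 4, axis=0)   # eight (n_units, n_units) matrices

# n_step_lstm adds an input bias and a recurrent bias for each gate, so the
# fused bias is halved and the same four pieces are passed twice.
b = np.random.randn(4 * n_units).astype(np.float32)
bs = np.split(b / 2, 4, axis=0) * 2                      # eight (n_units,) biases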
Example #2
Source File: test_transpose_sequence.py    From chainer with MIT License
def check_forward(self, xs_data):
    xs = [chainer.Variable(x) for x in xs_data]
    ys = functions.transpose_sequence(xs)
    # One output per time step, each with the expected batch size.
    self.assertEqual(len(ys), len(self.trans_lengths))
    for y, l in zip(ys, self.trans_lengths):
        self.assertEqual(len(y.data), l)

    # ys[i][j] must equal xs[j][i]: the time and batch axes are swapped.
    for i, l in enumerate(self.trans_lengths):
        for j in six.moves.range(l):
            testing.assert_allclose(ys[i].data[j], self.xs[j][i])
Example #3
Source File: test_transpose_sequence.py    From chainer with MIT License
def check_backward(self, xs_data, gs_data):
    # When the transposed output is empty, the function returns no result,
    # so there is nothing to differentiate.
    if len(self.trans_lengths) == 0:
        return

    def f(*xs):
        return functions.transpose_sequence(xs)

    gradient_check.check_backward(
        f, tuple(xs_data), tuple(gs_data))
Example #4
Source File: test_arrays.py    From chainer with MIT License
def test_output(self):

    class Model(chainer.Chain):
        def __init__(self):
            super(Model, self).__init__()

        def __call__(self, *xs):
            return F.transpose_sequence(xs)

    model = Model()
    xs = [input_generator.increasing(*shape)
          for shape in self.in_shapes]

    self.expect(model, xs, name=self.name)
Example #5
Source File: model.py    From blstm-cws with MIT License
def __call__(self, xs, ys):
    # Sort inputs and labels by descending sequence length, then transpose
    # both to time-major form before running the CRF.
    xs = permutate_list(xs, argsort_list_descent(xs), inv=False)
    xs = F.transpose_sequence(xs)
    ys = permutate_list(ys, argsort_list_descent(ys), inv=False)
    ys = F.transpose_sequence(ys)
    return super(CRF, self).__call__(xs, ys)
Example #6
Source File: model.py    From blstm-cws with MIT License
def argmax(self, xs):
    xs = permutate_list(xs, argsort_list_descent(xs), inv=False)
    xs = F.transpose_sequence(xs)
    score, path = super(CRF, self).argmax(xs)
    # Transpose the decoded label path back to one sequence per example.
    path = F.transpose_sequence(path)
    return score, path
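The helpers permutate_list() and argsort_list_descent() used in Examples #5 and #6 are local to the blstm-cws project; what follows is a plausible sketch of the sort-then-transpose pattern they implement, with both helper bodies assumed from their names rather than taken from the project:

import numpy as np
import chainer.functions as F

def argsort_list_descent(xs):
    # Indices that order the list by descending sequence length (assumed).
    return np.argsort([-len(x) for x in xs])

def permutate_list(xs, indices, inv):
    # Reorder a plain Python list; inv=True applies the inverse permutation (assumed).
    if inv:
        out = [None] * len(xs)
        for i, ind in enumerate(indices):
            out[ind] = xs[i]
        return out
    return [xs[ind] for ind in indices]

xs = [np.random.randn(l, 8).astype(np.float32) for l in (2, 5, 3)]
order = argsort_list_descent(xs)
xs_sorted = permutate_list(xs, order, inv=False)  # lengths 5, 3, 2
xs_t = F.transpose_sequence(xs_sorted)            # time-major list of 5 steps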
Example #7
Source File: train_recursive_minibatch.py    From chainer with MIT License
def forward(self, *inputs):
    # The flat input tuple packs six parallel per-example arrays.
    batch = len(inputs) // 6
    lefts = inputs[0: batch]
    rights = inputs[batch: batch * 2]
    dests = inputs[batch * 2: batch * 3]
    labels = inputs[batch * 3: batch * 4]
    sequences = inputs[batch * 4: batch * 5]
    leaf_labels = inputs[batch * 5: batch * 6]

    # Sort all arrays in descending order of length and transpose them
    # to time-major form.
    inds = numpy.argsort([-len(l) for l in lefts])
    lefts = F.transpose_sequence([lefts[i] for i in inds])
    rights = F.transpose_sequence([rights[i] for i in inds])
    dests = F.transpose_sequence([dests[i] for i in inds])
    labels = F.transpose_sequence([labels[i] for i in inds])
    sequences = F.transpose_sequence([sequences[i] for i in inds])
    leaf_labels = F.transpose_sequence(
        [leaf_labels[i] for i in inds])

    batch = len(inds)
    maxlen = len(sequences)

    loss = 0
    count = 0
    correct = 0

    # A thin stack shared by all examples in the minibatch.
    dtype = chainer.get_dtype()
    stack = self.xp.zeros((batch, maxlen * 2, self.n_units), dtype)
    # Embed the leaf words and push them onto the stack.
    for i, (word, label) in enumerate(zip(sequences, leaf_labels)):
        batch = word.shape[0]
        es = self.leaf(word)
        ds = self.xp.full((batch,), i, self.xp.int32)
        y = self.label(es)
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.array, axis=1)
        correct += (predict == label.array).sum()

        stack = thin_stack.thin_stack_set(stack, ds, es)

    # Pop child pairs, combine them, and push the results back.
    for left, right, dest, label in zip(lefts, rights, dests, labels):
        l, stack = thin_stack.thin_stack_get(stack, left)
        r, stack = thin_stack.thin_stack_get(stack, right)
        o = self.node(l, r)
        y = self.label(o)
        batch = l.shape[0]
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.array, axis=1)
        correct += (predict == label.array).sum()

        stack = thin_stack.thin_stack_set(stack, dest, o)

    loss /= count
    reporter.report({'loss': loss}, self)
    reporter.report({'total': count}, self)
    reporter.report({'correct': correct}, self)
    return loss
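The thin_stack helpers above come from the Chainer sentiment example; below is a rough NumPy sketch of the semantics the code appears to rely on (write one row per example at a given stack position, then read rows back), offered as an assumption rather than the actual, differentiable implementation:

import numpy as np

def thin_stack_set(stack, indices, values):
    # Write values[k] at position indices[k] of example k's stack (assumed semantics).
    stack[np.arange(len(indices)), indices] = values
    return stack

def thin_stack_get(stack, indices):
    # Read back the row at position indices[k] for each example k (assumed semantics).
    return stack[np.arange(len(indices)), indices], stack

stack = np.zeros((3, 10, 4), np.float32)  # (batch, 2 * maxlen, n_units)
es = np.ones((3, 4), np.float32)
stack = thin_stack_set(stack, np.array([0, 0, 0]), es)
vals, stack = thin_stack_get(stack, np.array([0, 0, 0]))  # vals == es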
Example #8
Source File: train_recursive_minibatch.py    From pfio with MIT License
def forward(self, *inputs):
    batch = len(inputs) // 6
    lefts = inputs[0: batch]
    rights = inputs[batch: batch * 2]
    dests = inputs[batch * 2: batch * 3]
    labels = inputs[batch * 3: batch * 4]
    sequences = inputs[batch * 4: batch * 5]
    leaf_labels = inputs[batch * 5: batch * 6]

    # Sort all arrays in descending order and transpose them
    inds = numpy.argsort([-len(l) for l in lefts])
    lefts = F.transpose_sequence([lefts[i] for i in inds])
    rights = F.transpose_sequence([rights[i] for i in inds])
    dests = F.transpose_sequence([dests[i] for i in inds])
    labels = F.transpose_sequence([labels[i] for i in inds])
    sequences = F.transpose_sequence([sequences[i] for i in inds])
    leaf_labels = F.transpose_sequence(
        [leaf_labels[i] for i in inds])

    batch = len(inds)
    maxlen = len(sequences)

    loss = 0
    count = 0
    correct = 0

    stack = self.xp.zeros((batch, maxlen * 2, self.n_units), 'f')
    for i, (word, label) in enumerate(zip(sequences, leaf_labels)):
        batch = word.shape[0]
        es = self.leaf(word)
        ds = self.xp.full((batch,), i, 'i')
        y = self.label(es)
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.data, axis=1)
        correct += (predict == label.data).sum()

        stack = thin_stack.thin_stack_set(stack, ds, es)

    for left, right, dest, label in zip(lefts, rights, dests, labels):
        l, stack = thin_stack.thin_stack_get(stack, left)
        r, stack = thin_stack.thin_stack_get(stack, right)
        o = self.node(l, r)
        y = self.label(o)
        batch = l.shape[0]
        loss += F.softmax_cross_entropy(y, label, normalize=False) * batch
        count += batch
        predict = self.xp.argmax(y.data, axis=1)
        correct += (predict == label.data).sum()

        stack = thin_stack.thin_stack_set(stack, dest, o)

    loss /= count
    reporter.report({'loss': loss}, self)
    reporter.report({'total': count}, self)
    reporter.report({'correct': correct}, self)
    return loss
Example #9
Source File: bi_lstm.py    From deep-crf with MIT License
def predict(self, y_list, t, compute_loss=True):
    # Re-group the flat per-token outputs into one score matrix per sentence.
    predict_list = []
    cnt = 0
    for n_len in self.n_length:
        pred = F.concat(y_list[cnt:cnt + n_len], axis=0)
        predict_list.append(pred)
        cnt += n_len

    inds = self.inds
    # (position, original index) pairs, used later to undo the length sort.
    inds_rev = sorted(enumerate(inds), key=lambda x: x[1])

    hs = [predict_list[i] for i in inds]
    ts_original = None
    if compute_loss:
        ts_original = [self.xp.array(t[i], self.xp.int32) for i in inds]

    hs = F.transpose_sequence(hs)

    loss = None
    if compute_loss and ts_original is not None:
        # CRF loss over the time-major label sequences.
        ts = F.transpose_sequence(ts_original)
        loss = self.lossfun(hs, ts)

    # Viterbi decoding; the decoded path comes back time-major, so transpose
    # it back to one label sequence per sentence.
    score, predicts_trans = self.lossfun.argmax(hs)
    predicts = F.transpose_sequence(predicts_trans)
    gold_predict_pairs = []
    if compute_loss:
        for pred, gold in zip(predicts, ts_original):
            pred = to_cpu(pred.data)
            gold = to_cpu(gold)
            gold_predict_pairs.append([gold, pred])
    else:
        for pred in predicts:
            pred = to_cpu(pred.data)
            gold_predict_pairs.append([pred])

    # Restore the original sentence order.
    gold_predict_pairs = [gold_predict_pairs[e_i] for e_i, _ in inds_rev]
    self.y = gold_predict_pairs

    return gold_predict_pairs, loss
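The inds/inds_rev bookkeeping in this example undoes the descending-length sort so results line up with the original sentence order again. A minimal, standalone illustration of that round trip with made-up lengths:

import numpy as np

lengths = [2, 5, 3]
inds = np.argsort([-l for l in lengths])            # sort order: [1, 2, 0]
inds_rev = sorted(enumerate(inds), key=lambda x: x[1])

sorted_items = [lengths[i] for i in inds]           # [5, 3, 2]
restored = [sorted_items[e_i] for e_i, _ in inds_rev]
assert restored == lengths                          # original order recovered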