Python chainer.report() Examples

The following are 30 code examples of chainer.report(), drawn from open-source projects. The project and source file for each example are noted in the header above it. You may also want to check out the other available functions and classes of the chainer module.
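All of these examples share one pattern: chainer.report() writes a dictionary of values into the observation of the currently active Reporter, prefixing each key with the name registered for the observer object. Here is a minimal, self-contained sketch of that pattern (the observer name 'my_model' is just a placeholder, not taken from any project below):

import chainer

reporter = chainer.Reporter()
observer = object()
reporter.add_observer('my_model', observer)

with reporter:
    # While the reporter is current, reported values are collected
    # into reporter.observation, keyed as '<observer name>/<key>'.
    chainer.report({'loss': 0.5}, observer)

assert reporter.observation['my_model/loss'] == 0.5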
Example #1
Source File: seq2seq.py    From chainer with MIT License
def __call__(self, trainer):
        device = self.device

        with chainer.no_backprop_mode():
            references = []
            hypotheses = []
            for i in range(0, len(self.test_data), self.batch):
                sources, targets = zip(*self.test_data[i:i + self.batch])
                references.extend([[t.tolist()] for t in targets])

                sources = [device.send(x) for x in sources]
                ys = [y.tolist()
                      for y in self.model.translate(sources, self.max_length)]
                hypotheses.extend(ys)

        bleu = bleu_score.corpus_bleu(
            references, hypotheses,
            smoothing_function=bleu_score.SmoothingFunction().method1)
        chainer.report({self.key: bleu}) 
Example #2
Source File: updater.py    From Semantic-Segmentation-using-Adversarial-Networks with MIT License
def _get_loss_gen(self):
        batchsize = self.y_fake.data.shape[0]
        L_mce = F.softmax_cross_entropy(self.pred_label_map, self.ground_truth, normalize=False)
        L_bce = F.softmax_cross_entropy(self.y_fake, Variable(self.xp.ones(batchsize, dtype=self.xp.int32), volatile=not self.gen.train))
        loss = L_mce + self.L_bce_weight * L_bce

        # log report
        label_true = chainer.cuda.to_cpu(self.ground_truth.data)
        label_pred = chainer.cuda.to_cpu(self.pred_label_map.data).argmax(axis=1)
        logs = []
        for i in six.moves.range(batchsize):
            acc, acc_cls, iu, fwavacc = utils.label_accuracy_score(
                label_true[i], label_pred[i], self.n_class)
            logs.append((acc, acc_cls, iu, fwavacc))
        log = np.array(logs).mean(axis=0)
        values = {
            'loss': loss,
            'accuracy': log[0],
            'accuracy_cls': log[1],
            'iu': log[2],
            'fwavacc': log[3],
        }
        chainer.report(values, self.gen)

        return loss 
Example #3
Source File: updater.py    From Semantic-Segmentation-using-Adversarial-Networks with MIT License
def calc_loss(self):
        batchsize = self.ground_truth.shape[0]
        self.loss = F.softmax_cross_entropy(self.pred_label_map, self.ground_truth, normalize=False)

        # log report
        label_true = chainer.cuda.to_cpu(self.ground_truth.data)
        label_pred = chainer.cuda.to_cpu(self.pred_label_map.data).argmax(axis=1)
        logs = []
        for i in six.moves.range(batchsize):
            acc, acc_cls, iu, fwavacc = utils.label_accuracy_score(
                label_true[i], label_pred[i], self.n_class)
            logs.append((acc, acc_cls, iu, fwavacc))
        log = np.array(logs).mean(axis=0)
        values = {
            'loss': self.loss,
            'accuracy': log[0],
            'accuracy_cls': log[1],
            'iu': log[2],
            'fwavacc': log[3],
        }
        chainer.report(values, self.model) 
Example #4
Source File: seq2seq_chainerio.py    From pfio with MIT License
def forward(self, trainer):
        with chainer.no_backprop_mode():
            references = []
            hypotheses = []
            for i in range(0, len(self.test_data), self.batch):
                sources, targets = zip(*self.test_data[i:i + self.batch])
                references.extend([[t.tolist()] for t in targets])

                sources = [
                    chainer.dataset.to_device(self.device, x) for x in sources]
                ys = [y.tolist()
                      for y in self.model.translate(sources, self.max_length)]
                hypotheses.extend(ys)

        bleu = bleu_score.corpus_bleu(
            references, hypotheses,
            smoothing_function=bleu_score.SmoothingFunction().method1)
        chainer.report({self.key: bleu}) 
Example #5
Source File: Alex_with_loss.py    From chainer-compiler with MIT License
def forward(self, x, t):
        # def forward(self, x):
        h = F.max_pooling_2d(F.local_response_normalization(
            F.relu(self.conv1(x))), 3, stride=2)
        h = F.max_pooling_2d(F.local_response_normalization(
            F.relu(self.conv2(h))), 3, stride=2)
        h = F.relu(self.conv3(h))
        h = F.relu(self.conv4(h))
        h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
        h = F.dropout(F.relu(self.fc6(h)))
        h = F.dropout(F.relu(self.fc7(h)))
        h = self.fc8(h)

        loss = F.softmax_cross_entropy(h, t)
        #loss = h

        # chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
        return loss

# from https://github.com/chainer/chainer/blob/master/examples/imagenet/alex.py 
Example #6
Source File: test_reporter.py    From chainer with MIT License
def test_add_observers(self):
        reporter = chainer.Reporter()
        observer1 = object()
        reporter.add_observer('o1', observer1)
        observer2 = object()
        reporter.add_observer('o2', observer2)

        reporter.report({'x': 1}, observer1)
        reporter.report({'y': 2}, observer2)

        observation = reporter.observation
        self.assertIn('o1/x', observation)
        self.assertEqual(observation['o1/x'], 1)
        self.assertIn('o2/y', observation)
        self.assertEqual(observation['o2/y'], 2)
        self.assertNotIn('x', observation)
        self.assertNotIn('y', observation)
        self.assertNotIn('o1/y', observation)
        self.assertNotIn('o2/x', observation) 
Example #7
Source File: fcn8s_matting.py    From portrait_matting with GNU General Public License v3.0
def __call__(self, x, t=None, w=None):
        # t, w is on host.

        # Forward network
        alpha = self.forward(x)

        if t is None:
            assert not chainer.config.train
            return

        # Weighted mean squared error
        # TODO: Do more tests
#         loss = F.mean(F.squared_error(alpha, t) * w)
        loss = F.mean_squared_error(alpha, t)

        if np.isnan(float(loss.data)):
            raise ValueError('Loss is nan.')
        chainer.report({'loss': loss}, self)

        return loss 
Example #8
Source File: fcn8s.py    From portrait_matting with GNU General Public License v3.0
def __call__(self, x, t=None):
        score = self.forward(x)

        if t is None:
            assert not chainer.config.train
            return

        loss = F.softmax_cross_entropy(score, t, normalize=True)
        if np.isnan(float(loss.data)):
            raise ValueError('Loss is nan.')
        chainer.report({'loss': loss}, self)

        accuracy = F.accuracy(score, t)
        chainer.report({'accuracy': accuracy}, self)

        return loss 
Example #9
Source File: reporter.py    From chainer with MIT License
@contextlib.contextmanager
def scope(self, observation):
        """Creates a scope to report observed values to ``observation``.

        This is a context manager to be passed to ``with`` statements. In this
        scope, the observation dictionary is changed to the given one.

        It also makes this reporter object current.

        Args:
            observation (dict): Observation dictionary. All observations
                reported inside of the ``with`` statement are written to this
                dictionary.

        """
        old = self.observation
        self.observation = observation
        self.__enter__()
        try:
            yield
        finally:
            self.__exit__(None, None, None)
            self.observation = old 
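A short usage sketch of scope(), following the semantics the docstring describes (the variable names here are illustrative):

reporter = chainer.Reporter()
observation = {}
with reporter.scope(observation):
    # Inside the scope, `reporter` is current and reports are
    # written into `observation` instead of reporter.observation.
    chainer.report({'x': 1})
assert observation['x'] == 1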
Example #10
Source File: updater.py    From become-yukarin with MIT License
def _loss_discriminator(self, discriminator, d_real, d_fake):
        b, _, t = d_real.data.shape

        loss_real = F.sum(F.softplus(-d_real)) / (b * t)
        chainer.report({'real': loss_real}, discriminator)

        loss_fake = F.sum(F.softplus(d_fake)) / (b * t)
        chainer.report({'fake': loss_fake}, discriminator)

        loss = loss_real + loss_fake
        chainer.report({'loss': loss}, discriminator)

        tp = (d_real.data > 0.5).sum()
        fp = (d_fake.data > 0.5).sum()
        fn = (d_real.data <= 0.5).sum()
        tn = (d_fake.data <= 0.5).sum()
        accuracy = (tp + tn) / (tp + fp + fn + tn)
        precision = tp / (tp + fp)
        recall = tp / (tp + fn)
        chainer.report({'accuracy': accuracy}, self.discriminator)
        chainer.report({'precision': precision}, self.discriminator)
        chainer.report({'recall': recall}, self.discriminator)
        return loss 
Example #11
Source File: resnet50.py    From chainer-compiler with MIT License
def forward(self, x, t):
        h = self.bn1(self.conv1(x))
        h = F.max_pooling_2d(F.relu(h), 3, stride=2)
        h = self.res2(h)
        h = self.res3(h)
        h = self.res4(h)
        h = self.res5(h)
        h = F.average_pooling_2d(h, 7, stride=1)
        h = self.fc(h)

        #loss = F.softmax_cross_entropy(h, t)
        loss = self.softmax_cross_entropy(h, t)
        if self.compute_accuracy:
            chainer.report({'loss': loss, 'accuracy': F.accuracy(h, np.argmax(t, axis=1))}, self)
        else:
            chainer.report({'loss': loss}, self)
        return loss 
Example #12
Source File: alex.py    From chainer-compiler with MIT License
def forward(self, x, t):
        h = F.max_pooling_2d(F.local_response_normalization(
            F.relu(self.conv1(x))), 3, stride=2)
        h = F.max_pooling_2d(F.local_response_normalization(
            F.relu(self.conv2(h))), 3, stride=2)
        h = F.relu(self.conv3(h))
        h = F.relu(self.conv4(h))
        h = F.max_pooling_2d(F.relu(self.conv5(h)), 3, stride=2)
        h = F.dropout(F.relu(self.fc6(h)))
        h = F.dropout(F.relu(self.fc7(h)))
        h = self.fc8(h)

        # EDIT(hamaji): ONNX-chainer cannot output SoftmaxCrossEntropy.
        # loss = F.softmax_cross_entropy(h, t)
        loss = self.softmax_cross_entropy(h, t)
        if self.compute_accuracy:
            chainer.report({'loss': loss, 'accuracy': F.accuracy(h, t)}, self)
        else:
            chainer.report({'loss': loss}, self)
        return loss 
Example #13
Source File: sr_updater.py    From become-yukarin with MIT License
def _loss_discriminator(self, discriminator, d_real, d_fake):
        b, _, w, h = d_real.data.shape

        loss_real = F.sum(F.softplus(-d_real)) / (b * w * h)
        chainer.report({'real': loss_real}, discriminator)

        loss_fake = F.sum(F.softplus(d_fake)) / (b * w * h)
        chainer.report({'fake': loss_fake}, discriminator)

        loss = loss_real + loss_fake
        chainer.report({'loss': loss}, discriminator)

        tp = (d_real.data > 0.5).sum()
        fp = (d_fake.data > 0.5).sum()
        fn = (d_real.data <= 0.5).sum()
        tn = (d_fake.data <= 0.5).sum()
        accuracy = (tp + tn) / (tp + fp + fn + tn)
        precision = tp / (tp + fp)
        recall = tp / (tp + fn)
        chainer.report({'accuracy': accuracy}, self.discriminator)
        chainer.report({'precision': precision}, self.discriminator)
        chainer.report({'recall': recall}, self.discriminator)
        return loss 
Example #14
Source File: updater.py    From pfio with MIT License
def loss_dec(self, dec, x_out, t_out, y_out, lam1=100, lam2=1):
        batchsize, _, w, h = y_out.data.shape
        loss_rec = lam1*(F.mean_absolute_error(x_out, t_out))
        loss_adv = lam2*F.sum(F.softplus(-y_out)) / batchsize / w / h
        loss = loss_rec + loss_adv
        chainer.report({'loss': loss}, dec)
        return loss 
Example #15
Source File: updater.py    From pfio with MIT License
def loss_enc(self, enc, x_out, t_out, y_out, lam1=100, lam2=1):
        batchsize, _, w, h = y_out.data.shape
        loss_rec = lam1*(F.mean_absolute_error(x_out, t_out))
        loss_adv = lam2*F.sum(F.softplus(-y_out)) / batchsize / w / h
        loss = loss_rec + loss_adv
        chainer.report({'loss': loss}, enc)
        return loss 
Example #16
Source File: models.py    From wavenet with Apache License 2.0
def __call__(self, x, t, label):
        y = self.predictor(x, label)
        dims = self.xp.prod(np.array(y.shape[2:]))  # for CIFAR should be 3072

        nll = F.softmax_cross_entropy(y, t, normalize=True)
        chainer.report({'nll': nll, 'bits/dim': nll / dims}, self)
        return nll
Example #17
Source File: updater.py    From pfio with MIT License
def loss_dis(self, dis, y_in, y_out):
        batchsize, _, w, h = y_in.data.shape

        L1 = F.sum(F.softplus(-y_in)) / batchsize / w / h
        L2 = F.sum(F.softplus(y_out)) / batchsize / w / h
        loss = L1 + L2
        chainer.report({'loss': loss}, dis)
        return loss 
Example #18
Source File: loss.py    From Comicolorization with MIT License
def make_loss(self, input, concat, target, test):
        output = self.forwarder(input, concat, test)['image']
        mae_loss = chainer.functions.mean_absolute_error(output, target)

        loss = {
            'mae': mae_loss,
        }
        chainer.report(loss, self.model)

        return {
            'main': loss,
        } 
Example #19
Source File: loss.py    From Comicolorization with MIT License
def sum_loss(self, loss):
        sum_loss = self.blend_loss(loss, self.config.blend['main'])
        chainer.report({'sum_loss': sum_loss}, self.model)
        return sum_loss 
Example #20
Source File: training.py    From mesh_reconstruction with MIT License
def validation(trainer=None, model=None, dataset=None):
    # evaluate voxel IoUs on all classes
    with chainer.configuration.using_config('train', False):
        ious = {}
        for class_id in dataset.class_ids:
            iou = 0
            for batch in dataset.get_all_batches_for_evaluation(100, class_id):
                batch = my_convertor(batch)
                iou += model.evaluate_iou(*batch).sum()
            iou /= dataset.num_data[class_id] * 24
            ious['%s/iou_%s' % (dataset.set_name, class_id)] = iou
        ious['%s/iou' % dataset.set_name] = np.mean([float(v) for v in ious.values()])
        chainer.report(ious) 
Example #21
Source File: test_reporter.py    From chainer with MIT License
def test_not_keep_graph(self):
        x = chainer.Variable(numpy.array([1], numpy.float32))
        y = functions.sigmoid(x)
        reporter = chainer.Reporter()
        with self._scope(False):
            reporter.report({'y': y})
        self.assertIsNone(reporter.observation['y'].creator) 
Example #22
Source File: seq2seq_chainerio.py    From pfio with MIT License
def forward(self, xs, ys):
        xs = [x[::-1] for x in xs]

        eos = self.xp.array([EOS], numpy.int32)
        ys_in = [F.concat([eos, y], axis=0) for y in ys]
        ys_out = [F.concat([y, eos], axis=0) for y in ys]

        # Both xs and ys_in are lists of arrays.
        exs = sequence_embed(self.embed_x, xs)
        eys = sequence_embed(self.embed_y, ys_in)

        batch = len(xs)
        # None represents a zero vector in an encoder.
        hx, cx, _ = self.encoder(None, None, exs)
        _, _, os = self.decoder(hx, cx, eys)

        # It is faster to concatenate data before calculating loss
        # because only one matrix multiplication is called.
        concat_os = F.concat(os, axis=0)
        concat_ys_out = F.concat(ys_out, axis=0)
        loss = F.sum(F.softmax_cross_entropy(
            self.W(concat_os), concat_ys_out, reduce='no')) / batch

        chainer.report({'loss': loss.data}, self)
        n_words = concat_ys_out.shape[0]
        perp = self.xp.exp(loss.data * batch / n_words)
        chainer.report({'perp': perp}, self)
        return loss 
Example #23
Source File: reporter.py    From chainer with MIT License
@contextlib.contextmanager
def report_scope(observation):
    """Returns a report scope with the current reporter.

    This is equivalent to ``get_current_reporter().scope(observation)``,
    except that it does not make the reporter current redundantly.

    """
    current = _get_reporters()[-1]
    old = current.observation
    current.observation = observation
    yield
    current.observation = old 
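A usage sketch of report_scope(), mirroring the test in Example #26 below:

reporter = chainer.Reporter()
observation = {}
with reporter:
    with chainer.report_scope(observation):
        chainer.report({'x': 1})
# 'x' lands in `observation`, not in reporter.observation.
assert observation['x'] == 1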
Example #24
Source File: test_evaluator.py    From chainer with MIT License
def forward(self, x, y):
        self.args.append((x, y))
        with chainer.using_device(backend.get_device_from_array(x, y)):
            chainer.report({'loss': x.sum() + y.sum()}, self) 
Example #25
Source File: test_evaluator.py    From chainer with MIT License
def forward(self, x):
        self.args.append(x)
        chainer.report({'loss': x.sum()}, self) 
Example #26
Source File: test_reporter.py    From chainer with MIT License
def test_report_scope(self):
        reporter = chainer.Reporter()
        observation = {}

        with reporter:
            with chainer.report_scope(observation):
                chainer.report({'x': 1})

        self.assertIn('x', observation)
        self.assertEqual(observation['x'], 1)
        self.assertNotIn('x', reporter.observation) 
Example #27
Source File: test_reporter.py    From chainer with MIT License
def test_report_with_unregistered_observer(self):
        reporter = chainer.Reporter()
        observer = object()
        with reporter:
            with self.assertRaises(KeyError):
                chainer.report({'x': 1}, observer) 
Example #28
Source File: test_reporter.py    From chainer with MIT License
def test_report(self):
        reporter = chainer.Reporter()
        with reporter:
            chainer.report({'x': 1})
        observation = reporter.observation
        self.assertIn('x', observation)
        self.assertEqual(observation['x'], 1) 
Example #29
Source File: test_reporter.py    From chainer with MIT License 5 votes vote down vote up
def test_report_without_reporter(self):
        observer = object()
        chainer.report({'x': 1}, observer) 
Example #30
Source File: test_reporter.py    From chainer with MIT License
def test_keep_graph(self):
        x = chainer.Variable(numpy.array([1], numpy.float32))
        y = functions.sigmoid(x)
        reporter = chainer.Reporter()
        with self._scope(True):
            reporter.report({'y': y})
        assert reporter.observation['y'].creator is not None