Python chainer.serializers.save_hdf5() Examples

The following are 9 code examples of chainer.serializers.save_hdf5(), collected from open-source projects. The source file and project for each example are listed above it. You may also want to look at the other functions and classes available in the chainer.serializers module.
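Before the examples, here is a minimal sketch (not taken from any of the projects below) of how save_hdf5() is typically paired with load_hdf5(). The file name 'model.h5' and the small Linear link are placeholders, and the h5py package must be installed for the HDF5 serializers to work.

import chainer.links as L
from chainer import serializers

model = L.Linear(3, 2)                    # any Link or Chain can be serialized
serializers.save_hdf5('model.h5', model)  # write the parameters to an HDF5 file
serializers.load_hdf5('model.h5', model)  # restore them into an object with the same structure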
Example #1
Source File: a3c.py    From async-rl with MIT License
def save_model(self, model_filename):
        """Save a network model to a file
        """
        serializers.save_hdf5(model_filename, self.model)
        serializers.save_hdf5(model_filename + '.opt', self.optimizer) 
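To restore what save_model writes, the matching chainer.serializers.load_hdf5 calls would look roughly like this (a hypothetical counterpart, assuming the same model and optimizer objects are already constructed):

def load_model(self, model_filename):
        """Sketch of the reverse operation: restore the model and optimizer."""
        serializers.load_hdf5(model_filename, self.model)
        serializers.load_hdf5(model_filename + '.opt', self.optimizer)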
Example #2
Source File: serializer.py    From chainer with MIT License
def save_and_load_hdf5(src, dst):
    """Saves ``src`` to an HDF5 file and loads it to ``dst``.

    This is a shortcut of :func:`save_and_load` using HDF5 de/serializers.

    Args:
        src: An object to save.
        dst: An object to load to.

    """
    save_and_load(src, dst, 'tmp.h5',
                  serializers.save_hdf5, serializers.load_hdf5) 
Example #3
Source File: ddqn.py    From double-dqn with MIT License
def save(self):
		serializers.save_hdf5("conv.model", self.conv)
		if not self.fcl_eliminated:
			serializers.save_hdf5("fc.model", self.fc)
Example #4
Source File: nin.py    From deel with MIT License
def save(self, filename):
		cs.save_hdf5(filename, self.func.copy().to_cpu())
Example #5
Source File: googlenet.py    From deel with MIT License
def save(self, filename):
		cs.save_hdf5(filename, self.model.copy().to_cpu())
Example #6
Source File: rnin.py    From deel with MIT License
def save(self, filename):
		# cs.save_hdf5(filename, self.func.copy().to_cpu())
		cs.save_hdf5(filename, self.func.copy())
Example #7
Source File: resnet152.py    From deel with MIT License
def save(self, filename):
		cs.save_hdf5(filename, self.func.to_cpu())
Example #8
Source File: caffenet.py    From deel with MIT License
def save(self, filename):
		cs.save_hdf5(filename, self.func.to_cpu())
Example #9
Source File: train.py    From chainer-image-caption with MIT License
def train(epoch_num):
    image_groups, sentence_groups = make_groups(train_image_ids, train_sentences)
    test_image_groups, test_sentence_groups = make_groups(test_image_ids, test_sentences, train=False)
    for epoch in range(epoch_num):
        batches = random_batches(image_groups, sentence_groups)
        sum_loss = 0
        sum_acc = 0
        sum_size = 0
        batch_num = len(batches)
        for i, (image_id_batch, sentence_batch) in enumerate(batches):
            loss, acc, size = forward(caption_net, images[image_id_batch], sentence_batch)
            caption_net.cleargrads()
            loss.backward()
            loss.unchain_backward()
            optimizer.update()
            sentence_length = sentence_batch.shape[1]
            sum_loss += float(loss.data) * size
            sum_acc += acc * size
            sum_size += size
            if (i + 1) % 500 == 0:
                print('{} / {} loss: {} accuracy: {}'.format(i + 1, batch_num, sum_loss / sum_size, sum_acc / sum_size))
        print('epoch: {} done'.format(epoch + 1))
        print('train loss: {} accuracy: {}'.format(sum_loss / sum_size, sum_acc / sum_size))
        sum_loss = 0
        sum_acc = 0
        sum_size = 0
        for image_ids, sentences in zip(test_image_groups, test_sentence_groups):
            if len(sentences) == 0:
                continue
            size = len(sentences)
            for i in range(0, size, batch_size):
                image_id_batch = image_ids[i:i + batch_size]
                sentence_batch = sentences[i:i + batch_size]
                loss, acc, size = forward(caption_net, images[image_id_batch], sentence_batch, train=False)
                sentence_length = sentence_batch.shape[1]
                sum_loss += float(loss.data) * size
                sum_acc += acc * size
                sum_size += size
        print('test loss: {} accuracy: {}'.format(sum_loss / sum_size, sum_acc / sum_size))

        serializers.save_hdf5(args.output + '_{0:04d}.model'.format(epoch), caption_net)
        serializers.save_hdf5(args.output + '_{0:04d}.state'.format(epoch), optimizer)