Python chainer.serializers.load_hdf5() Examples
The following are 20 code examples of chainer.serializers.load_hdf5(), drawn from open-source projects. The originating project and source file are noted above each example. You may also want to check out the other available functions and classes of the chainer.serializers module.
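Before the examples, a quick reminder of how the function is used: load_hdf5(filename, obj) deserializes an HDF5 file produced by save_hdf5() into an object (a chainer.Chain, Link, or Optimizer) that has already been constructed with the same structure, and it requires h5py to be installed. The short sketch below is illustrative only and is not taken from any of the projects listed; the TinyNet class and the file name are made up.

# Minimal round-trip sketch (hypothetical model and file name; assumes Chainer + h5py).
import chainer
import chainer.links as L
from chainer import serializers


class TinyNet(chainer.Chain):

    def __init__(self):
        super(TinyNet, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(4, 3)

    def __call__(self, x):
        return self.l1(x)


model = TinyNet()
serializers.save_hdf5('tiny.h5', model)      # write parameters to an HDF5 file

restored = TinyNet()                         # must have the same structure as the saved model
serializers.load_hdf5('tiny.h5', restored)   # parameters are loaded into the object in place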
Example #1
Source File: train.py From ssai-cnn with MIT License | 6 votes |
def get_model(args):
    model_fn = os.path.basename(args.model)
    model = imp.load_source(model_fn.split('.')[0], args.model).model

    if 'result_dir' in args:
        dst = '%s/%s' % (args.result_dir, model_fn)
        if not os.path.exists(dst):
            shutil.copy(args.model, dst)

        dst = '%s/%s' % (args.result_dir, os.path.basename(__file__))
        if not os.path.exists(dst):
            shutil.copy(__file__, dst)

    # load model
    if args.resume_model is not None:
        serializers.load_hdf5(args.resume_model, model)

    # prepare model
    if args.gpu >= 0:
        model.to_gpu()

    return model
Example #2
Source File: yolov2_predict.py From YOLOv2 with MIT License | 6 votes |
def __init__(self): # hyper parameters weight_file = "./backup/yolov2_final_cpu.model" self.n_classes = 10 self.n_boxes = 5 self.detection_thresh = 0.3 self.iou_thresh = 0.3 self.label_file = "./data/label.txt" with open(self.label_file, "r") as f: self.labels = f.read().strip().split("\n") # load model print("loading animal model...") yolov2 = YOLOv2(n_classes=self.n_classes, n_boxes=self.n_boxes) model = YOLOv2Predictor(yolov2) serializers.load_hdf5(weight_file, model) # load saved model model.predictor.train = False model.predictor.finetune = False self.model = model
Example #3
Source File: yolov2_darknet_predict.py From YOLOv2 with MIT License | 6 votes |
def __init__(self): # hyper parameters weight_file = "./yolov2_darknet.model" self.n_classes = 80 self.n_boxes = 5 self.detection_thresh = 0.5 self.iou_thresh = 0.5 self.labels = ["person","bicycle","car","motorcycle","airplane","bus","train","truck","boat","traffic light","fire hydrant","stop sign","parking meter","bench","bird","cat","dog","horse","sheep","cow","elephant","bear","zebra","giraffe","backpack","umbrella","handbag","tie","suitcase","frisbee","skis","snowboard","sports ball","kite","baseball bat","baseball glove","skateboard","surfboard","tennis racket","bottle","wine glass","cup","fork","knife","spoon","bowl","banana","apple","sandwich","orange","broccoli","carrot","hot dog","pizza","donut","cake","chair","couch","potted plant","bed","dining table","toilet","tv","laptop","mouse","remote","keyboard","cell phone","microwave","oven","toaster","sink","refrigerator","book","clock","vase","scissors","teddy bear","hair drier","toothbrush"] anchors = [[0.738768, 0.874946], [2.42204, 2.65704], [4.30971, 7.04493], [10.246, 4.59428], [12.6868, 11.8741]] # load model print("loading coco model...") yolov2 = YOLOv2(n_classes=self.n_classes, n_boxes=self.n_boxes) serializers.load_hdf5(weight_file, yolov2) # load saved model model = YOLOv2Predictor(yolov2) model.init_anchor(anchors) model.predictor.train = False model.predictor.finetune = False self.model = model
Example #4
Source File: chainer_seq2seq_att.py From seq2seq_temporal_attention with MIT License | 6 votes |
def test_batch(model, test_data, vocab, inv_vocab, modelfile_to_load):
    print('Testing (beam size = 1)...')
    print('print output to file: {}'.format(out_test_filename))
    serializers.load_hdf5(modelfile_to_load, model)
    batch_test = \
        utils_seq2seq.gen_batch_test(test_data, args.feature,
                                     params.batch_size_val, vocab, xp)
    caption_out = []
    output_file = open(out_test_filename, mode='w')
    for vid_batch_test, caption_batch_test, id_batch_test in batch_test:
        output_test = forward(model, params, vocab, inv_vocab,
                              vid_batch_test, caption_batch_test,
                              'test-on-train', args.batchsizeval)
        for ii in range(args.batchsizeval):
            caption_out.append({'image_id': id_batch_test[ii],
                                'caption': output_test[ii]})
            print('%s %s' % (id_batch_test[ii], output_test[ii]))
            output_file.write(id_batch_test[ii] + '\t' + output_test[ii] + '\n')
    output_file.close()
    with open(eval_test_filename, mode='w') as f:
        json.dump(caption_out, f)
    eval_coco.eval_coco(args.cocotest, eval_test_filename)
Example #5
Source File: chainer_seq2seq_att.py From seq2seq_temporal_attention with MIT License | 6 votes |
def test(model, test_data, vocab, inv_vocab, modelfile_to_load, params):
    print('Testing ...')
    print('Beam size: {}'.format(params.beam_size))
    print('print output to file:', out_test_filename)
    serializers.load_hdf5(modelfile_to_load, model)
    batch_test = utils_seq2seq.gen_batch_test(test_data, args.feature, 1, vocab, xp)
    output_file = open(out_test_filename, mode='w')
    for vid_batch, caption_batch, id_batch in batch_test:
        output = predict(model, params, vocab, inv_vocab, vid_batch,
                         batch_size=1, beam_size=params.beam_size)
        print('%s %s' % (id_batch[0], output))
        output_file.write(id_batch[0] + '\t' + output + '\n')
    output_file.close()
    utils_coco.convert(out_test_filename, eval_test_filename)
    eval_coco.eval_coco(args.cocotest, eval_test_filename)
Example #6
Source File: resnet152.py From deel with MIT License | 5 votes |
def __init__(self, modelpath='ResNet-152-model.caffemodel',
             mean='ilsvrc_2012_mean.npy', labels='misc/labels.txt',
             in_size=224, tuning_layer='fc1000'):
    super(ResNet152, self).__init__('ResNet152', in_size)
    if os.path.splitext(modelpath)[1] == ".caffemodel":
        self.func = LoadCaffeModel(modelpath)
    else:
        self.func = LoadCaffeModel("ResNet-152-model.caffemodel")
        cs.load_hdf5(modelpath, self.func)
    xp = Deel.xp

    ImageNet.mean_image = np.ndarray((3, 256, 256), dtype=np.float32)
    ImageNet.mean_image[0] = 104
    ImageNet.mean_image[1] = 117
    ImageNet.mean_image[2] = 123
    ImageNet.in_size = in_size

    self.labels = np.loadtxt(labels, str, delimiter="\t")
    self.batchsize = 1
    self.x_batch = xp.ndarray((self.batchsize, 3, self.in_size, self.in_size),
                              dtype=np.float32)
    if Deel.gpu >= 0:
        self.func = self.func.to_gpu(Deel.gpu)
    #self.optimizer = optimizers.MomentumSGD(lr=0.01, momentum=0.9)
    self.optimizer = optimizers.RMSpropGraves()
    #self.optimizer.setup(self.func.fc1000)
    self.optimizer.setup(self.func[tuning_layer])
Example #7
Source File: invert.py From ssai-cnn with MIT License | 5 votes |
def load_model(self):
    model_fn = os.path.basename(self.args.model)
    self.model = imp.load_source(
        model_fn.split('.')[0], self.args.model).model
    self.model.train = False
    serializers.load_hdf5(self.args.param, self.model)
    if self.args.gpu >= 0:
        self.model.to_gpu()
Example #8
Source File: rnin.py From deel with MIT License | 5 votes |
def __init__(self, modelpath=None, mean='misc/ilsvrc_2012_mean.npy',
             labels='data/labels.txt', optimizer=None):
    super(RegionalNetworkInNetwork, self).__init__('RegionalNetworkInNetwork', in_size=227)
    self.func = deel.model.rnin.RNIN()
    if modelpath is not None:
        cs.load_hdf5("misc/" + modelpath, self.func)
    self.graph_generated = None
    xp = Deel.xp
    #ImageNet.mean_image = pickle.load(open(mean, 'rb'))
    ImageNet.mean_image = np.ndarray((3, 256, 256), dtype=xp.float32)
    ImageNet.mean_image[0] = 104
    ImageNet.mean_image[1] = 117
    ImageNet.mean_image[2] = 123
    ImageNet.in_size = self.func.insize

    self.labels = np.loadtxt(labels, str, delimiter="\t")

    self.t = ChainerTensor(Variable(Deel.xp.asarray([1.0])))

    if Deel.gpu >= 0:
        self.func.to_gpu()

    if optimizer is None:
        self.optimizer = optimizers.Adam()
    self.optimizer.setup(self.func)
Example #9
Source File: googlenet.py From deel with MIT License | 5 votes |
def __init__(self, modelpath='bvlc_googlenet.caffemodel',
             mean='ilsvrc_2012_mean.npy', labels='misc/labels.txt', in_size=224):
    super(GoogLeNet, self).__init__('GoogLeNet', in_size)
    if os.path.splitext(modelpath)[1] == ".caffemodel":
        self.func = LoadCaffeModel(modelpath)
        self.model = convert(self.func)
    else:
        self.func = None
        self.model = chainermodel.GoogLeNet()
        cs.load_hdf5(modelpath, self.model)
    xp = Deel.xp

    ImageNet.mean_image = np.ndarray((3, 256, 256), dtype=np.float32)
    ImageNet.mean_image[0] = 103.939
    ImageNet.mean_image[1] = 116.779
    ImageNet.mean_image[2] = 123.68
    ImageNet.in_size = in_size

    #print type(ImageNet.mean_image)
    self.labels = np.loadtxt(labels, str, delimiter="\t")
    self.batchsize = 1
    self.x_batch = xp.ndarray((self.batchsize, 3, self.in_size, self.in_size),
                              dtype=np.float32)
    if Deel.gpu >= 0:
        self.model = self.model.to_gpu(Deel.gpu)
    self.optimizer = optimizers.MomentumSGD(lr=0.01, momentum=0.9)
    #self.optimizer = optimizers.Adam()
    #self.optimizer.setup(self.func)
    self.optimizer.setup(self.model)
Example #10
Source File: nin.py From deel with MIT License | 5 votes |
def __init__(self, modelpath=None, mean='misc/ilsvrc_2012_mean.npy',
             labels='data/labels.txt', optimizer=None):
    super(NetworkInNetwork, self).__init__('NetworkInNetwork', in_size=227)
    self.func = deel.model.nin.NIN()
    if modelpath is not None:
        cs.load_hdf5("misc/" + modelpath, self.func)
    self.graph_generated = None
    xp = Deel.xp
    #ImageNet.mean_image = pickle.load(open(mean, 'rb'))
    ImageNet.mean_image = np.ndarray((3, 256, 256), dtype=xp.float32)
    ImageNet.mean_image[0] = 104
    ImageNet.mean_image[1] = 117
    ImageNet.mean_image[2] = 123
    ImageNet.in_size = self.func.insize

    self.labels = np.loadtxt(labels, str, delimiter="\t")

    self.t = ChainerTensor(Variable(Deel.xp.asarray([1.0])))

    if Deel.gpu >= 0:
        self.func.to_gpu()

    if optimizer is None:
        self.optimizer = optimizers.Adam()
    self.optimizer.setup(self.func)
Example #11
Source File: evaluation.py From chainer-gan-lib with MIT License | 5 votes |
def load_inception_model(): infile = "%s/inception/inception_score.model"%os.path.dirname(__file__) model = Inception() serializers.load_hdf5(infile, model) model.to_gpu() return model
Example #12
Source File: evaluation.py From chainer-gan-lib with MIT License | 5 votes |
def load_inception_model(): infile = "%s/../common/inception/inception_score.model"%os.path.dirname(__file__) model = Inception() serializers.load_hdf5(infile, model) model.to_gpu() return model
Example #13
Source File: ddqn.py From double-dqn with MIT License | 5 votes |
def load(self):
    filename = "conv.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.conv)
        print "convolutional network loaded."

    if self.fcl_eliminated is False:
        filename = "fc.model"
        if os.path.isfile(filename):
            serializers.load_hdf5(filename, self.fc)
            print "fully-connected network loaded."
Example #14
Source File: serializer.py From chainer with MIT License | 5 votes |
def save_and_load_hdf5(src, dst):
    """Saves ``src`` to an HDF5 file and loads it to ``dst``.

    This is a short cut of :func:`save_and_load` using HDF5 de/serializers.

    Args:
        src: An object to save.
        dst: An object to load to.

    """
    save_and_load(src, dst, 'tmp.h5',
                  serializers.save_hdf5, serializers.load_hdf5)
Example #15
Source File: a3c.py From async-rl with MIT License | 5 votes |
def load_model(self, model_filename):
    """Load a network model from a file
    """
    serializers.load_hdf5(model_filename, self.model)
    copy_param.copy_param(target_link=self.model,
                          source_link=self.shared_model)
    opt_filename = model_filename + '.opt'
    if os.path.exists(opt_filename):
        serializers.load_hdf5(opt_filename, self.optimizer)
    else:
        print('WARNING: {0} was not found, so loaded only a model'.format(
            opt_filename))
Example #16
Source File: invert_diff.py From ssai-cnn with MIT License | 5 votes |
def load_model(self):
    model_fn = os.path.basename(self.args.model)
    self.model = imp.load_source(
        model_fn.split('.')[0], self.args.model).model
    self.model.train = False
    serializers.load_hdf5(self.args.param, self.model)
    if self.args.gpu >= 0:
        self.model.to_gpu()
Example #17
Source File: train.py From ssai-cnn with MIT License | 5 votes |
def get_model_optimizer(args):
    model = get_model(args)

    if 'opt' in args:
        # prepare optimizer
        if args.opt == 'MomentumSGD':
            optimizer = optimizers.MomentumSGD(lr=args.lr, momentum=0.9)
        elif args.opt == 'Adam':
            optimizer = optimizers.Adam(alpha=args.alpha)
        elif args.opt == 'AdaGrad':
            optimizer = optimizers.AdaGrad(lr=args.lr)
        else:
            raise Exception('No optimizer is selected')

        optimizer.setup(model)

        if args.opt == 'MomentumSGD':
            optimizer.add_hook(
                chainer.optimizer.WeightDecay(args.weight_decay))

        if args.resume_opt is not None:
            serializers.load_hdf5(args.resume_opt, optimizer)
            args.epoch_offset = int(
                re.search('epoch-([0-9]+)', args.resume_opt).groups()[0])

        return model, optimizer
    else:
        print('No optimizer generated.')
        return model
Example #18
Source File: chainer_seq2seq_att.py From seq2seq_temporal_attention with MIT License | 4 votes |
def eval(args, params):
    train_data = utils_corpus.get_dataset(args.train, 'train')
    val_data = utils_corpus.get_dataset(args.val, 'val')
    test_data = utils_corpus.get_dataset(args.test, 'test')
    vocab, inv_vocab = utils_seq2seq.read_vocab(args.vocab)
    num_vocab = len(vocab)

    print('training data size: {}'.format(len(train_data)))
    print('validating data size: {}'.format(len(val_data)))
    print('test data size: {}'.format(len(test_data)))
    print('no. vocabs in training data: {}'.format(len(vocab)))

    model = S2S_att.S2S_att(params.input_size, num_vocab,
                            params.embed_size, params.hidden_size, args.align)
    if params.gpu >= 0:
        model.to_gpu()

    score_json = [vars(args)]
    with open(score_filename, mode='w') as f:
        json.dump(score_json, f)

    if args.mode == 'train':
        train(model, train_data, val_data, vocab, inv_vocab, params, score_json)
    elif args.mode == 'test':
        test(model, test_data, vocab, inv_vocab,
             modelfile_to_load=args.model, params=params)
    elif args.mode == 'test-batch':
        test_batch(model, test_data, vocab, inv_vocab,
                   modelfile_to_load=args.model)
    else:
        modelfile_to_load = args.model
        serializers.load_hdf5(modelfile_to_load, model)
        with h5py.File(args.feature, mode='r') as f:
            features = []
            for k in f.keys():
                features.append(xp.array(f[k], dtype=xp.float32))
        output = predict(model, params, vocab, inv_vocab, features,
                         batch_size=1, beam_size=params.beam_size)
        print(output)
Example #19
Source File: demo_a3c_doom.py From async-rl with MIT License | 4 votes |
def main():
    import logging
    logging.basicConfig(level=logging.DEBUG)

    parser = argparse.ArgumentParser()
    parser.add_argument('model', type=str)
    parser.add_argument('--seed', type=int, default=0)
    parser.add_argument('--sleep', type=float, default=0)
    parser.add_argument('--scenario', type=str, default='basic')
    parser.add_argument('--n-runs', type=int, default=10)
    parser.add_argument('--use-lstm', action='store_true')
    parser.add_argument('--window-visible', action='store_true')
    parser.add_argument('--deterministic', action='store_true')
    parser.add_argument('--random', action='store_true')
    parser.set_defaults(window_visible=False)
    parser.set_defaults(use_lstm=False)
    parser.set_defaults(deterministic=False)
    parser.set_defaults(random=False)
    args = parser.parse_args()

    random_seed.set_random_seed(args.seed)

    n_actions = doom_env.DoomEnv(
        window_visible=False, scenario=args.scenario).n_actions

    if not args.random:
        if args.use_lstm:
            model = A3CLSTM(n_actions)
        else:
            model = A3CFF(n_actions)
        serializers.load_hdf5(args.model, model)

    scores = []
    env = doom_env.DoomEnv(window_visible=args.window_visible,
                           scenario=args.scenario,
                           sleep=args.sleep)
    for i in range(args.n_runs):
        if args.random:
            score = eval_single_random_run(env)
        else:
            score = eval_single_run(
                env, model, phi, deterministic=args.deterministic)
        print('Run {}: {}'.format(i, score))
        scores.append(score)
    print('Average: {}'.format(sum(scores) / args.n_runs))
Example #20
Source File: demo_a3c_ale.py From async-rl with MIT License | 4 votes |
def main():
    import logging
    logging.basicConfig(level=logging.DEBUG)

    parser = argparse.ArgumentParser()
    parser.add_argument('rom', type=str)
    parser.add_argument('model', type=str)
    parser.add_argument('--seed', type=int, default=0)
    parser.add_argument('--use-sdl', action='store_true')
    parser.add_argument('--n-runs', type=int, default=10)
    parser.add_argument('--deterministic', action='store_true')
    parser.add_argument('--record-screen-dir', type=str, default=None)
    parser.add_argument('--use-lstm', action='store_true')
    parser.set_defaults(use_sdl=False)
    parser.set_defaults(use_lstm=False)
    parser.set_defaults(deterministic=False)
    args = parser.parse_args()

    random_seed.set_random_seed(args.seed)

    n_actions = ale.ALE(args.rom).number_of_actions

    # Load an A3C-DQN model
    if args.use_lstm:
        model = A3CLSTM(n_actions)
    else:
        model = A3CFF(n_actions)
    serializers.load_hdf5(args.model, model)

    scores = []
    for i in range(args.n_runs):
        episode_record_dir = None
        if args.record_screen_dir is not None:
            episode_record_dir = os.path.join(args.record_screen_dir, str(i))
            os.makedirs(episode_record_dir)
        score = eval_performance(
            args.rom, model, deterministic=args.deterministic,
            use_sdl=args.use_sdl, record_screen_dir=episode_record_dir)
        print('Run {}: {}'.format(i, score))
        scores.append(score)
    print('Average: {}'.format(sum(scores) / args.n_runs))