Python keras.models.save_model() Examples
The following are 7 code examples of keras.models.save_model(), drawn from open-source projects. Each example lists its original project, source file, and license. You may also want to check out the other available functions and classes of the keras.models module.
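Before the project examples, here is a minimal usage sketch (not taken from any of the projects below) of what keras.models.save_model() and keras.models.load_model() do in the standalone Keras API: save_model() writes a model's architecture, weights, and optimizer state to a single HDF5 file, and load_model() restores it without re-defining the architecture.

from keras.models import Sequential, save_model, load_model
from keras.layers import Dense

# A tiny model purely for illustration.
model = Sequential([
    Dense(8, activation='relu', input_shape=(4,)),
    Dense(1, activation='sigmoid'),
])
model.compile(optimizer='adam', loss='binary_crossentropy')

# Save architecture, weights, and optimizer state to one HDF5 file.
save_model(model, 'model.h5')

# Restore the full model later, e.g. in another process.
restored = load_model('model.h5')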
Example #1
Source File: NeuMF_RecommenderWrapper.py From RecSys2019_DeepLearning_Evaluation with GNU Affero General Public License v3.0 | 6 votes |
def save_model(self, folder_path, file_name=None):

    if file_name is None:
        file_name = self.RECOMMENDER_NAME

    self._print("Saving model in file '{}'".format(folder_path + file_name))

    self.model.save_weights(folder_path + file_name + "_weights", overwrite=True)

    data_dict_to_save = {
        "n_users": self.n_users,
        "n_items": self.n_items,
        "mf_dim": self.mf_dim,
        "layers": self.layers,
        "reg_layers": self.reg_layers,
        "reg_mf": self.reg_mf,
    }

    dataIO = DataIO(folder_path=folder_path)
    dataIO.save_data(file_name=file_name, data_dict_to_save=data_dict_to_save)

    self._print("Saving complete")
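Because this wrapper stores the Keras weights and the hyperparameter dict separately, restoring it means rebuilding the network before loading the weights. The following is a hypothetical counterpart, not shown in the source; it assumes DataIO exposes a load_data() that returns the saved dict, and it uses a placeholder _build_model() method that the real wrapper may name differently.

def load_model(self, folder_path, file_name=None):

    if file_name is None:
        file_name = self.RECOMMENDER_NAME

    self._print("Loading model from file '{}'".format(folder_path + file_name))

    # Restore the saved hyperparameters as attributes (assumed DataIO API).
    dataIO = DataIO(folder_path=folder_path)
    data_dict = dataIO.load_data(file_name=file_name)
    for attrib_name, attrib_value in data_dict.items():
        setattr(self, attrib_name, attrib_value)

    # Rebuild the Keras graph with the restored hyperparameters, then load
    # the weights that save_model() wrote next to the data dict.
    self.model = self._build_model()  # hypothetical builder, not in the source
    self.model.load_weights(folder_path + file_name + "_weights")

    self._print("Loading complete")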
Example #2
Source File: training.py From deep_complex_networks with MIT License | 5 votes |
def on_epoch_end(self, epoch, logs={}):
    if (epoch + 1) % self.period_of_epochs == 0:
        # Filenames
        baseHDF5Filename = "ModelChkpt{:06d}.hdf5".format(epoch + 1)
        baseYAMLFilename = "ModelChkpt{:06d}.yaml".format(epoch + 1)
        hdf5Filename = os.path.join(self.chkptsdir, baseHDF5Filename)
        yamlFilename = os.path.join(self.chkptsdir, baseYAMLFilename)

        # YAML
        yamlModel = self.model.to_yaml()
        with open(yamlFilename, "w") as yamlFile:
            yamlFile.write(yamlModel)

        # HDF5
        KM.save_model(self.model, hdf5Filename)
        with H.File(hdf5Filename, "r+") as f:
            f.require_dataset("initialEpoch", (), "uint64", True)[...] = int(epoch + 1)
            f.flush()

        # Symlink to new HDF5 file, then atomically rename and replace.
        os.symlink(baseHDF5Filename, self.linkFilename + ".rename")
        os.rename(self.linkFilename + ".rename", self.linkFilename)

        # Print
        L.getLogger("train").info("Saved checkpoint to {:s} at epoch {:5d}".format(hdf5Filename, epoch + 1))


#
# Save record-best models.
#
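The callback above stores the epoch counter as an extra "initialEpoch" dataset inside the checkpoint file that keras.models.save_model() produced. A hypothetical helper (not part of the project) could read both back when resuming training; it assumes the extra dataset does not interfere with load_model(), as the checkpoint layout above implies.

import h5py
from keras.models import load_model

def resume_from_checkpoint(hdf5_path):
    # Load the checkpointed model (architecture, weights, optimizer state).
    model = load_model(hdf5_path)
    # Read back the epoch counter the callback stored alongside the model,
    # so training can continue with fit(..., initial_epoch=initial_epoch).
    with h5py.File(hdf5_path, 'r') as f:
        initial_epoch = int(f['initialEpoch'][()])
    return model, initial_epoch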
Example #3
Source File: training.py From deep_complex_networks with MIT License | 5 votes |
def on_epoch_end(self, epoch, logs={}):
    val_loss = logs['loss']
    val_acc = logs['acc']
    if val_acc > self.best_acc:
        self.best_acc = val_acc
        self.best_loss = val_loss

        # Filenames
        hdf5Filename = os.path.join(self.bestdir, "Bestmodel_{:06d}_{:.4f}_{:.4f}.hdf5".format(epoch + 1, val_acc, val_loss))
        yamlFilename = os.path.join(self.bestdir, "Bestmodel_{:06d}_{:.4f}_{:.4f}.yaml".format(epoch + 1, val_acc, val_loss))

        # YAML
        yamlModel = self.model.to_yaml()
        with open(yamlFilename, "w") as yamlFile:
            yamlFile.write(yamlModel)

        # HDF5
        KM.save_model(self.model, hdf5Filename)
        with H.File(hdf5Filename, "r+") as f:
            f.require_dataset("initialEpoch", (), "uint64", True)[...] = int(epoch + 1)
            f.flush()

        # Print
        L.getLogger("train").info("Saved best model to {:s} at epoch {:5d}".format(hdf5Filename, epoch + 1))


#
# ResNet Learning-rate Schedules.
#
Example #4
Source File: kerashack.py From betago with MIT License | 5 votes |
def save_model_to_hdf5_group(model, f):
    # Use Keras save_model to save the full model (including optimizer
    # state) to a file.
    # Then we can embed the contents of that HDF5 file inside ours.
    tempfd, tempfname = tempfile.mkstemp(prefix='tmp-betago')
    try:
        os.close(tempfd)
        save_model(model, tempfname)
        serialized_model = h5py.File(tempfname, 'r')
        root_item = serialized_model.get('/')
        serialized_model.copy(root_item, f, 'kerasmodel')
        serialized_model.close()
    finally:
        os.unlink(tempfname)
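This trick nests the entire save_model() output under a 'kerasmodel' group in the host HDF5 file. A hypothetical inverse (not shown in the source, names illustrative) would copy that group back into a standalone temporary file, since load_model() expects the model at the file root rather than inside a subgroup.

import os
import tempfile
import h5py
from keras.models import load_model

def load_model_from_hdf5_group(f, name='kerasmodel'):
    # Re-serialize the embedded group into a standalone temporary HDF5 file,
    # then let Keras read it back with load_model().
    tempfd, tempfname = tempfile.mkstemp(prefix='tmp-betago')
    try:
        os.close(tempfd)
        root_item = f.get(name)
        with h5py.File(tempfname, 'w') as serialized_model:
            # Copy the group's attributes and children (not the group itself)
            # so the model ends up at the root of the temporary file.
            for attr_name, attr_value in root_item.attrs.items():
                serialized_model.attrs[attr_name] = attr_value
            for key in root_item.keys():
                root_item.copy(key, serialized_model, name=key)
        return load_model(tempfname)
    finally:
        os.unlink(tempfname)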
Example #5
Source File: tensorflow_bind.py From trains with Apache License 2.0 | 5 votes |
def _patch_io_calls(Network, Sequential, keras_saving):
    try:
        if Sequential is not None:
            Sequential._updated_config = _patched_call(Sequential._updated_config,
                                                       PatchKerasModelIO._updated_config)
            if hasattr(Sequential.from_config, '__func__'):
                # noinspection PyUnresolvedReferences
                Sequential.from_config = classmethod(_patched_call(Sequential.from_config.__func__,
                                                                   PatchKerasModelIO._from_config))
            else:
                Sequential.from_config = _patched_call(Sequential.from_config, PatchKerasModelIO._from_config)

        if Network is not None:
            Network._updated_config = _patched_call(Network._updated_config, PatchKerasModelIO._updated_config)
            if hasattr(Sequential.from_config, '__func__'):
                # noinspection PyUnresolvedReferences
                Network.from_config = classmethod(_patched_call(Network.from_config.__func__,
                                                                PatchKerasModelIO._from_config))
            else:
                Network.from_config = _patched_call(Network.from_config, PatchKerasModelIO._from_config)

            Network.save = _patched_call(Network.save, PatchKerasModelIO._save)
            Network.save_weights = _patched_call(Network.save_weights, PatchKerasModelIO._save_weights)
            Network.load_weights = _patched_call(Network.load_weights, PatchKerasModelIO._load_weights)

        if keras_saving is not None:
            keras_saving.save_model = _patched_call(keras_saving.save_model, PatchKerasModelIO._save_model)
            keras_saving.load_model = _patched_call(keras_saving.load_model, PatchKerasModelIO._load_model)

    except Exception as ex:
        LoggerRoot.get_base_logger(TensorflowBinding).warning(str(ex))
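The example above wraps the Keras I/O entry points (including keras_saving.save_model) with _patched_call so every save or load is reported to the experiment tracker. The same monkey-patching idea can be sketched generically; the helper and hook names below are illustrative, not the trains internals.

import keras.models

def _wrap(original_fn, before=None):
    # Generic monkey-patch helper: run a hook, then call the original function.
    def wrapper(*args, **kwargs):
        if before is not None:
            before(*args, **kwargs)
        return original_fn(*args, **kwargs)
    return wrapper

# Example: log every call that goes through keras.models.save_model().
keras.models.save_model = _wrap(
    keras.models.save_model,
    before=lambda model, filepath, *a, **kw: print('saving model to', filepath),
)

Note that rebinding the module attribute only intercepts calls made as keras.models.save_model(...); code that imported the function directly beforehand keeps the original reference, which is why the trains patcher also wraps the Network methods themselves.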
Example #6
Source File: tensorflow_bind.py From trains with Apache License 2.0 | 5 votes |
def _save(original_fn, self, *args, **kwargs):
    if hasattr(self, 'trains_out_model'):
        self.trains_out_model._processed = False
    original_fn(self, *args, **kwargs)
    # no need to specially call, because the original save uses "save_model" which we overload
    if not hasattr(self, 'trains_out_model') or not self.trains_out_model._processed:
        PatchKerasModelIO._update_outputmodel(self, *args, **kwargs)
Example #7
Source File: MCRecRecommenderWrapper.py From RecSys2019_DeepLearning_Evaluation with GNU Affero General Public License v3.0 | 5 votes |
def save_model(self, folder_path, file_name=None):

    if file_name is None:
        file_name = self.RECOMMENDER_NAME

    self._print("Saving model in file '{}'".format(folder_path + file_name))

    self.model.save_weights(folder_path + file_name + "_weights", overwrite=True)

    data_dict_to_save = {
        "n_users": self.n_users,
        "n_items": self.n_items,
        "path_nums": self.path_nums,
        "timestamps": self.timestamps,
        "length": self.length,
        "layers": self.layers,
        "reg_layes": self.reg_layes,
        "latent_dim": self.latent_dim,
        "reg_latent": self.reg_latent,
        "learning_rate": self.learning_rate,
    }

    dataIO = DataIO(folder_path=folder_path)
    dataIO.save_data(file_name=file_name, data_dict_to_save=data_dict_to_save)

    self._print("Saving complete")