Python keras.optimizers.serialize() Examples
The following are 7 code examples of keras.optimizers.serialize(), collected from open-source projects. You can go to the original project or source file via the links above each example, and you may also want to check out the other available functions and classes of the keras.optimizers module.
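Before the examples, a minimal sketch of what serialize() returns may help. This assumes the standalone Keras 2.x API; the exact config keys and class-name casing vary across versions:

from keras import optimizers

# serialize() turns an optimizer instance into a plain dict of the form
# {'class_name': ..., 'config': {...}}; deserialize() rebuilds the instance.
sgd = optimizers.SGD(lr=0.01, momentum=0.9)
config = optimizers.serialize(sgd)
print(config['class_name'])  # 'SGD' (casing may vary by Keras version)
restored = optimizers.deserialize(config)
assert isinstance(restored, optimizers.SGD)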
Example #1
Source File: workers.py From dist-keras with GNU General Public License v3.0
def __init__(self, model, optimizer, loss, loss_weights, metrics=["accuracy"],
             features_col="features", label_col="label", batch_size=32,
             num_epoch=1, learning_rate=1.0):
    assert isinstance(optimizer, (str, Optimizer)), \
        "'optimizer' must be a string or a Keras Optimizer instance"
    assert isinstance(features_col, (str, list)), \
        "'features_col' must be a string or a list of strings"
    assert isinstance(label_col, (str, list)), \
        "'label_col' must be a string or a list of strings"
    self.model = model
    # Normalize the optimizer to its serialized (dict) form: string names get
    # an empty config, instances go through keras.optimizers.serialize().
    self.optimizer = {'class_name': optimizer, 'config': {}} \
        if isinstance(optimizer, str) else serialize(optimizer)
    self.loss = loss
    self.loss_weights = loss_weights
    self.metrics = metrics
    self.features_column = [features_col] if isinstance(features_col, str) else features_col
    self.label_column = [label_col] if isinstance(label_col, str) else label_col
    self.batch_size = batch_size
    self.num_epoch = num_epoch
    self.max_mini_batches = 100
    self.prefetching_thread = None
    self.mini_batches = None
    self.is_prefetching = True
    self.worker_id = -1
    self.learning_rate = learning_rate
    self.num_inputs = len(self.features_column)
    self.num_outputs = len(self.label_column)
    self.current_epoch = 0
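On the worker side, the stored dict can be turned back into a live optimizer. A minimal sketch of that inverse step, with an illustrative variable name rather than dist-keras' actual worker code:

from keras.optimizers import deserialize

# opt_config is a dict shaped like the one built in __init__ above, either
# {'class_name': 'adam', 'config': {}} or the full output of serialize().
# Keras 2.x matches built-in class names case-insensitively.
optimizer = deserialize(opt_config)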
Example #2
Source File: test_ml_model.py From elephas with MIT License
def test_spark_ml_model(spark_context):
    df = to_data_frame(spark_context, x_train, y_train, categorical=True)
    test_df = to_data_frame(spark_context, x_test, y_test, categorical=True)

    sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    sgd_conf = optimizers.serialize(sgd)

    # Initialize Spark ML Estimator
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(model.to_yaml())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("categorical_crossentropy")
    estimator.set_metrics(['acc'])
    estimator.set_epochs(epochs)
    estimator.set_batch_size(batch_size)
    estimator.set_validation_split(0.1)
    estimator.set_categorical_labels(True)
    estimator.set_nb_classes(nb_classes)

    # Fitting a model returns a Transformer
    pipeline = Pipeline(stages=[estimator])
    fitted_pipeline = pipeline.fit(df)

    # Evaluate Spark model by evaluating the underlying model
    prediction = fitted_pipeline.transform(test_df)
    pnl = prediction.select("label", "prediction")
    pnl.show(100)

    prediction_and_label = pnl.rdd.map(lambda row: (row.label, row.prediction))
    metrics = MulticlassMetrics(prediction_and_label)
    print(metrics.precision())
    print(metrics.recall())
Example #3
Source File: optimizers.py From keras-contrib with MIT License
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = get_model(x_train.shape[1], 10, y_train.shape[1])
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target

    # Round-trip check: serialize -> deserialize -> serialize should be
    # stable. custom_objects registers the contrib class with Keras.
    config = optimizers.serialize(optimizer)
    custom_objects = {optimizer.__class__.__name__: optimizer.__class__}
    optim = optimizers.deserialize(config, custom_objects)
    new_config = optimizers.serialize(optim)
    assert config == new_config
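keras-contrib optimizers are not in Keras' built-in registry, so the deserialize call above needs the custom_objects mapping; without it the class_name lookup fails. A minimal standalone sketch, using FTML as an example contrib optimizer (import path assumed):

from keras import optimizers
from keras_contrib.optimizers import FTML  # assumed keras-contrib import path

opt = FTML()
config = optimizers.serialize(opt)
# Deserializing a non-built-in class only works when the class is
# supplied explicitly via custom_objects.
restored = optimizers.deserialize(config, custom_objects={'FTML': FTML})
assert isinstance(restored, FTML)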
Example #4
Source File: optimizers_test.py From DeepLearning_Wavelet-LSTM with MIT License
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()

    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target

    config = optimizers.serialize(optimizer)
    optim = optimizers.deserialize(config)
    new_config = optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)
Example #5
Source File: optimizers_test.py From faceswap with GNU General Public License v3.0
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()

    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    # TODO PlaidML fails this test
    assert history.history['acc'][-1] >= target

    config = k_optimizers.serialize(optimizer)
    optim = k_optimizers.deserialize(config)
    new_config = k_optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)
Example #6
Source File: l2optimizer.py From DIIN-in-Keras with MIT License
def get_config(self):
    config = {'optimizer': serialize(self.optimizer),
              'l2_full_step': float(K.get_value(self.l2_full_step)),
              'l2_full_ratio': float(K.get_value(self.l2_full_ratio)),
              'l2_difference_full_ratio': float(K.get_value(self.l2_difference_full_ratio))}
    return config
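get_config covers only the saving half; a wrapper like this typically pairs it with a from_config classmethod that deserializes the nested optimizer before reconstructing the wrapper. A hypothetical sketch, assuming deserialize is imported from keras.optimizers alongside serialize and that the constructor accepts the same keys get_config emits (not shown in the DIIN-in-Keras source):

@classmethod
def from_config(cls, config, custom_objects=None):
    # Hypothetical inverse of get_config above: rebuild the wrapped
    # optimizer from its serialized dict, then pass everything on to the
    # constructor (assumes matching parameter names).
    config = dict(config)
    config['optimizer'] = deserialize(config['optimizer'],
                                      custom_objects=custom_objects)
    return cls(**config)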
Example #7
Source File: spark_model.py From elephas with MIT License
def __init__(self, model, mode='asynchronous', frequency='epoch',
             parameter_server_mode='http', num_workers=None,
             custom_objects=None, batch_size=32, port=4000, *args, **kwargs):
    """SparkModel

    Base class for distributed training on RDDs. Spark model takes a Keras
    model as master network, an optimization scheme, a parallelisation mode
    and an averaging frequency.

    :param model: Compiled Keras model
    :param mode: String, choose from `asynchronous`, `synchronous` and `hogwild`
    :param frequency: String, either `epoch` or `batch`
    :param parameter_server_mode: String, either `http` or `socket`
    :param num_workers: int, number of workers used for training (defaults to None)
    :param custom_objects: Keras custom objects
    :param batch_size: batch size used for training and inference
    :param port: port used in case of 'http' parameter server mode
    """
    self._master_network = model
    if not hasattr(model, "loss"):
        raise Exception(
            "Compile your Keras model before initializing an Elephas model with it")
    metrics = model.metrics
    loss = model.loss
    optimizer = serialize_optimizer(model.optimizer)

    if custom_objects is None:
        custom_objects = {}
    if metrics is None:
        metrics = ["accuracy"]
    self.mode = mode
    self.frequency = frequency
    self.num_workers = num_workers
    self.weights = self._master_network.get_weights()
    self.pickled_weights = None
    self.master_optimizer = optimizer
    self.master_loss = loss
    self.master_metrics = metrics
    self.custom_objects = custom_objects
    self.parameter_server_mode = parameter_server_mode
    self.batch_size = batch_size
    self.port = port
    self.kwargs = kwargs

    self.serialized_model = model_to_dict(model)
    # `!=` rather than `is not`: identity comparison against a string
    # literal is a bug (and a SyntaxWarning on newer Pythons).
    if self.mode != 'synchronous':
        if self.parameter_server_mode == 'http':
            self.parameter_server = HttpServer(
                self.serialized_model, self.mode, self.port)
            self.client = HttpClient(self.port)
        elif self.parameter_server_mode == 'socket':
            self.parameter_server = SocketServer(self.serialized_model)
            self.client = SocketClient()
        else:
            raise ValueError("Parameter server mode has to be either `http` or `socket`, "
                             "got {}".format(self.parameter_server_mode))
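For context, typical usage of this class follows the elephas README of that era; to_simple_rdd and the exact fit() signature are tied to that version and should be treated as illustrative:

from elephas.spark_model import SparkModel
from elephas.utils.rdd_utils import to_simple_rdd

# Build an RDD of (features, label) pairs and train asynchronously.
rdd = to_simple_rdd(spark_context, x_train, y_train)
spark_model = SparkModel(model, frequency='epoch', mode='asynchronous')
spark_model.fit(rdd, epochs=5, batch_size=32, verbose=0, validation_split=0.1)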