Python keras.metrics.binary_crossentropy() Examples
The following are 17 code examples of keras.metrics.binary_crossentropy(), drawn from open-source projects; the source file, project, and license are noted above each example. You may also want to check out the other functions and classes available in the keras.metrics module.
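Before the examples, here is a minimal sketch of the two ways the function is typically used: called directly on backend tensors, and passed to model.compile() as a metric. The toy shapes and model below are illustrative, not taken from any example on this page, and assume a Keras 2-style install.

import numpy as np
from keras import backend as K
from keras.layers import Dense, Input
from keras.models import Model
from keras.metrics import binary_crossentropy

# Called directly on tensors: returns the mean cross-entropy over the last axis.
y_true = K.constant(np.array([[1.0, 0.0, 1.0]]))
y_pred = K.constant(np.array([[0.9, 0.1, 0.8]]))
bce = binary_crossentropy(y_true, y_pred)  # symbolic tensor of shape (1,)

# Passed to compile(): Keras evaluates and logs it on every batch.
inputs = Input(shape=(3,))
outputs = Dense(1, activation='sigmoid')(inputs)
model = Model(inputs, outputs)
model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=[binary_crossentropy])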
Example #1
Source File: cifar10_generate.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    # Reconstruction term: per-pixel binary cross-entropy, scaled up to the image size.
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    # KL divergence between the approximate posterior N(z_mean, exp(z_log_var)) and a unit Gaussian prior.
    kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
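The self argument indicates this is a method on a custom layer. In the upstream Keras VAE example that these scripts follow, that layer's call() attaches the loss to the graph via add_loss(), so compile() receives loss=None. A minimal sketch of that wiring, reconstructed from the standard Keras example rather than this project, so treat the details as assumptions:

from keras.layers import Layer

class CustomVariationalLayer(Layer):
    # A pass-through layer that exists only to register the VAE loss.
    def vae_loss(self, x, x_decoded_mean_squash):
        ...  # as in Example #1

    def call(self, inputs):
        x, x_decoded_mean_squash = inputs
        self.add_loss(self.vae_loss(x, x_decoded_mean_squash), inputs=inputs)
        return x  # the return value is unused downstream

y = CustomVariationalLayer()([x_input, x_decoded_mean_squash])
vae = Model(x_input, y)
vae.compile(optimizer='rmsprop', loss=None)  # loss was added via add_loss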
Example #2
Source File: keras_utils.py From Benchmarks with MIT License

def xent(y_true, y_pred):
    return binary_crossentropy(y_true, y_pred)
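A thin wrapper like this typically exists so the metric appears in training logs under a short name, since Keras labels custom metrics with the function's name. A hedged usage sketch (the model is assumed to exist already):

model.compile(optimizer='adam', loss='binary_crossentropy', metrics=[xent])
# the progress bar then reports e.g.  loss: 0.42 - xent: 0.42 - val_xent: 0.45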
Example #3
Source File: p1b1_baseline_keras2.py From Benchmarks with MIT License

def xent(y_true, y_pred):
    return binary_crossentropy(y_true, y_pred)
Example #4
Source File: vae_common.py From keras_experiments with The Unlicense

def vae_loss(self, x, x_decoded_mean_squash, z_mean, z_log_var):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    img_rows, img_cols = self._img_rows, self._img_cols
    # generative or reconstruction loss
    xent_loss = img_rows * img_cols * \
        metrics.binary_crossentropy(x, x_decoded_mean_squash)
    # Kullback-Leibler divergence loss
    kl_loss = - 0.5 * K.mean(
        1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
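Unlike Example #1, this variant takes z_mean and z_log_var as explicit arguments instead of closing over module-level variables, so it cannot be handed to compile() directly (a Keras loss receives only y_true and y_pred). One way to adapt it is to bind the latent tensors in a closure; a sketch under that assumption, with make_vae_loss and vae_obj as illustrative names not taken from the project:

def make_vae_loss(vae_obj, z_mean, z_log_var):
    # Bind the latent tensors so the result matches the (y_true, y_pred) signature.
    def loss(x, x_decoded_mean_squash):
        return vae_obj.vae_loss(x, x_decoded_mean_squash, z_mean, z_log_var)
    return loss

vae.compile(optimizer='rmsprop', loss=make_vae_loss(vae_obj, z_mean, z_log_var))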
Example #5
Source File: variational_autoencoder_deconv.py From keras_experiments with The Unlicense

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * \
        metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = - 0.5 * K.mean(
        1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #6
Source File: variational_autoencoder_deconv.py From pCVR with Apache License 2.0

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #7
Source File: variational_autoencoder.py From pCVR with Apache License 2.0

def vae_loss(self, x, x_decoded_mean):
    # Dense (non-convolutional) variant: the inputs are already flat vectors of
    # length original_dim, and the KL term is summed over the latent dimensions.
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
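Every loss on this page references z_mean and z_log_var, which in the standard Keras VAE example are produced by a reparameterization-trick sampling layer. A sketch of that layer, reconstructed from the standard example rather than these projects (latent_dim and epsilon_std are assumed module-level constants):

from keras.layers import Lambda

def sampling(args):
    z_mean, z_log_var = args
    # Draw epsilon ~ N(0, 1), then shift and scale: z = mu + sigma * epsilon.
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], latent_dim),
                              mean=0., stddev=epsilon_std)
    return z_mean + K.exp(z_log_var / 2) * epsilon

z = Lambda(sampling, output_shape=(latent_dim,))([z_mean, z_log_var])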
Example #8
Source File: caltech101_92_train.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #9
Source File: mnist_train.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean):
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #10
Source File: mnist_general_latent_space_and_generate.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean):
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #11
Source File: cifar10_train.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #12
Source File: caltech101_128_generate.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #13
Source File: mnist_3d_latent_space_and_generate.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean):
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #14
Source File: caltech101_128_train.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #15
Source File: mnist_2d_latent_space_and_generate.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean):
    xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
    kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #16
Source File: caltech101_92_generate.py From VAE-for-Image-Generation with MIT License

def vae_loss(self, x, x_decoded_mean_squash):
    x = K.flatten(x)
    x_decoded_mean_squash = K.flatten(x_decoded_mean_squash)
    xent_loss = img_rows * img_cols * metrics.binary_crossentropy(x, x_decoded_mean_squash)
    kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
    return K.mean(xent_loss + kl_loss)
Example #17
Source File: step2_train_nodule_detector.py From kaggle_ndsb2017 with MIT License

def get_net(input_shape=(CUBE_SIZE, CUBE_SIZE, CUBE_SIZE, 1), load_weight_path=None,
            features=False, mal=False) -> Model:
    inputs = Input(shape=input_shape, name="input_1")
    x = inputs
    # 1st layer group (Keras 1 API: border_mode/subsample rather than padding/strides)
    x = AveragePooling3D(pool_size=(2, 1, 1), strides=(2, 1, 1), border_mode="same")(x)
    x = Convolution3D(64, 3, 3, 3, activation='relu', border_mode='same', name='conv1', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(1, 2, 2), strides=(1, 2, 2), border_mode='valid', name='pool1')(x)
    # 2nd layer group
    x = Convolution3D(128, 3, 3, 3, activation='relu', border_mode='same', name='conv2', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), border_mode='valid', name='pool2')(x)
    if USE_DROPOUT:
        x = Dropout(p=0.3)(x)
    # 3rd layer group
    x = Convolution3D(256, 3, 3, 3, activation='relu', border_mode='same', name='conv3a', subsample=(1, 1, 1))(x)
    x = Convolution3D(256, 3, 3, 3, activation='relu', border_mode='same', name='conv3b', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), border_mode='valid', name='pool3')(x)
    if USE_DROPOUT:
        x = Dropout(p=0.4)(x)
    # 4th layer group
    x = Convolution3D(512, 3, 3, 3, activation='relu', border_mode='same', name='conv4a', subsample=(1, 1, 1))(x)
    x = Convolution3D(512, 3, 3, 3, activation='relu', border_mode='same', name='conv4b', subsample=(1, 1, 1))(x)
    x = MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2), border_mode='valid', name='pool4')(x)
    if USE_DROPOUT:
        x = Dropout(p=0.5)(x)

    last64 = Convolution3D(64, 2, 2, 2, activation="relu", name="last_64")(x)
    # Two heads: a sigmoid nodule classifier and a linear malignancy regressor.
    out_class = Convolution3D(1, 1, 1, 1, activation="sigmoid", name="out_class_last")(last64)
    out_class = Flatten(name="out_class")(out_class)
    out_malignancy = Convolution3D(1, 1, 1, 1, activation=None, name="out_malignancy_last")(last64)
    out_malignancy = Flatten(name="out_malignancy")(out_malignancy)

    model = Model(input=inputs, output=[out_class, out_malignancy])
    if load_weight_path is not None:
        model.load_weights(load_weight_path, by_name=False)
    model.compile(optimizer=SGD(lr=LEARN_RATE, momentum=0.9, nesterov=True),
                  loss={"out_class": "binary_crossentropy", "out_malignancy": mean_absolute_error},
                  metrics={"out_class": [binary_accuracy, binary_crossentropy],
                           "out_malignancy": mean_absolute_error})
    if features:
        # Re-wire the model to expose the 64-channel feature volume instead.
        model = Model(input=inputs, output=[last64])
    model.summary(line_length=140)
    return model
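This network is written against the Keras 1 API (Convolution3D, border_mode, subsample, Dropout(p=...), Model(input=..., output=...)). Under Keras 2 the same layers are spelled differently; a sketch of the renames only, assuming no behavioral changes are wanted:

from keras.layers import Conv3D, MaxPooling3D, Dropout

x = Conv3D(64, (3, 3, 3), activation='relu', padding='same',
           strides=(1, 1, 1), name='conv1')(x)
x = MaxPooling3D(pool_size=(1, 2, 2), strides=(1, 2, 2),
                 padding='valid', name='pool1')(x)
x = Dropout(rate=0.3)(x)
model = Model(inputs=inputs, outputs=[out_class, out_malignancy])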