Python tensorflow.keras.backend.random_uniform() Examples
The following are 12 code examples of tensorflow.keras.backend.random_uniform(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module tensorflow.keras.backend, or try the search function.
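In tf.keras, K.random_uniform(shape, minval=0.0, maxval=1.0, dtype=None, seed=None) returns a tensor of the given shape filled with values drawn uniformly from [minval, maxval). A minimal sketch of the basics, assuming TensorFlow 2.x with eager execution:

import tensorflow as tf
import tensorflow.keras.backend as K

# A 2x3 tensor of floats drawn uniformly from [0, 1).
x = K.random_uniform(shape=(2, 3))

# An explicit range and dtype.
y = K.random_uniform(shape=(2, 3), minval=-0.5, maxval=0.5, dtype='float32')

# Comparing a uniform draw against a probability yields a Bernoulli
# (boolean) mask: a pattern several of the examples below rely on.
drop_mask = K.random_uniform(shape=(2, 3)) < 0.15  # True ~15% of the time

print(x.numpy(), y.numpy(), drop_mask.numpy(), sep='\n')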
Example #1
Source File: metrics.py From neuron with GNU General Public License v3.0
def loss(self, y_true, y_pred):
    # get the value for the true and fake images
    disc_true = self.disc(y_true)
    disc_pred = self.disc(y_pred)

    # sample a x_hat by sampling along the line between true and pred
    # z = tf.placeholder(tf.float32, shape=[None, 1])
    # shp = y_true.get_shape()[0]
    # WARNING: SHOULD REALLY BE shape=[batch_size, 1] !!!
    # self.batch_size does not work, since it's not None!!!
    alpha = K.random_uniform(shape=[K.shape(y_pred)[0], 1, 1, 1])
    diff = y_pred - y_true
    interp = y_true + alpha * diff

    # take gradient of D(x_hat)
    gradients = K.gradients(self.disc(interp), [interp])[0]
    grad_pen = K.mean(K.square(K.sqrt(K.sum(K.square(gradients), axis=1)) - 1))

    # compute loss
    return (K.mean(disc_pred) - K.mean(disc_true)) + self.lambda_gp * grad_pen
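The alpha shape [K.shape(y_pred)[0], 1, 1, 1] draws one interpolation coefficient per batch element and broadcasts it across the spatial and channel dimensions, which is how WGAN-GP samples points on the line between real and generated images. (Note that K.gradients only works when building a graph; under eager execution one would use tf.GradientTape instead.) A standalone sketch of just the interpolation step, with hypothetical 4-D image batches:

import tensorflow as tf
import tensorflow.keras.backend as K

y_true = tf.zeros((8, 32, 32, 3))  # hypothetical batch of real images
y_pred = tf.ones((8, 32, 32, 3))   # hypothetical batch of generated images

# One alpha in [0, 1) per sample, broadcast over height, width, channels.
alpha = K.random_uniform(shape=[K.shape(y_pred)[0], 1, 1, 1])
interp = y_true + alpha * (y_pred - y_true)

print(interp.shape)  # (8, 32, 32, 3)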
Example #2
Source File: noisy_dense.py From tf2rl with MIT License
def call(self, inputs):
    # Implement Eq.(9)
    perturbed_kernel = self.kernel + \
        self.sigma_kernel * K.random_uniform(shape=self.kernel_shape)
    outputs = K.dot(inputs, perturbed_kernel)
    if self.use_bias:
        perturbed_bias = self.bias + \
            self.sigma_bias * K.random_uniform(shape=self.bias_shape)
        outputs = K.bias_add(outputs, perturbed_bias)
    if self.activation is not None:
        outputs = self.activation(outputs)
    return outputs
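Each call draws fresh uniform noise, so the layer's effective weights differ on every forward pass while the noise scales sigma_kernel and sigma_bias remain learnable parameters. A condensed sketch of the kernel perturbation, with hypothetical shapes standing in for the layer's attributes:

import tensorflow as tf
import tensorflow.keras.backend as K

kernel = tf.Variable(tf.zeros((4, 2)))            # stands in for self.kernel
sigma_kernel = tf.Variable(tf.fill((4, 2), 0.1))  # stands in for self.sigma_kernel

# Fresh noise on every call: each forward pass sees slightly
# different effective weights.
perturbed_kernel = kernel + sigma_kernel * K.random_uniform(shape=(4, 2))

inputs = tf.ones((3, 4))                          # hypothetical batch of 3
outputs = K.dot(inputs, perturbed_kernel)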
Example #3
Source File: RandomSequenceMask.py From tape-neurips2019 with MIT License
def _generate_bert_mask(self, inputs):
    mask_shape = K.shape(inputs)
    bert_mask = K.random_uniform(mask_shape) < self.percentage
    return bert_mask
Example #4
Source File: RandomSequenceMask.py From tape-neurips2019 with MIT License
def call(self, inputs: tf.Tensor, mask: Optional[tf.Tensor] = None):
    """
    Args:
        inputs (tf.Tensor[ndims=2, int]): Tensor of values to mask
        mask (Optional[tf.Tensor[bool]]): Locations in the inputs that are
            valid (i.e. not padding, start tokens, etc.)

    Returns:
        masked_inputs (tf.Tensor[ndims=2, int]): Tensor of masked values
        bert_mask: Locations in the input that were masked
    """
    bert_mask = self._generate_bert_mask(inputs)

    if mask is not None:
        bert_mask &= mask

    masked_inputs = inputs * tf.cast(~bert_mask, inputs.dtype)

    token_bert_mask = K.random_uniform(K.shape(bert_mask)) < 0.8
    random_bert_mask = (K.random_uniform(
        K.shape(bert_mask)) < 0.1) & ~token_bert_mask
    true_bert_mask = ~token_bert_mask & ~random_bert_mask

    token_bert_mask = tf.cast(token_bert_mask & bert_mask, inputs.dtype)
    random_bert_mask = tf.cast(random_bert_mask & bert_mask, inputs.dtype)
    true_bert_mask = tf.cast(true_bert_mask & bert_mask, inputs.dtype)

    masked_inputs += self.mask_token * token_bert_mask  # type: ignore
    masked_inputs += K.random_uniform(
        K.shape(bert_mask), 0, self.n_symbols, dtype=inputs.dtype) * random_bert_mask
    masked_inputs += inputs * true_bert_mask

    return masked_inputs, bert_mask
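Note the split this produces: the two uniform draws are independent, so of the positions selected by bert_mask about 80% receive the mask token, about 0.2 * 0.1 = 2% receive a random symbol, and the remaining ~18% keep their true value (a strict BERT-style 80/10/10 split would use a 0.5 threshold on the second draw). A quick empirical check of those fractions:

import tensorflow as tf
import tensorflow.keras.backend as K

shape = (100000,)
u1 = K.random_uniform(shape)
u2 = K.random_uniform(shape)

token_mask = u1 < 0.8                   # replaced by the mask token
random_mask = (u2 < 0.1) & ~token_mask  # replaced by a random symbol
true_mask = ~token_mask & ~random_mask  # kept as-is

for name, m in [('mask token', token_mask),
                ('random symbol', random_mask),
                ('kept', true_mask)]:
    print(name, float(tf.reduce_mean(tf.cast(m, tf.float32))))
# Prints roughly 0.80, 0.02, and 0.18.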
Example #5
Source File: BeplerModel.py From tape-neurips2019 with MIT License
def _generate_bert_mask(self, inputs):
    mask_shape = K.shape(inputs)
    bert_mask = K.random_uniform(mask_shape) < self.percentage
    return bert_mask
Example #6
Source File: BeplerModel.py From tape-neurips2019 with MIT License
def call(self, inputs: tf.Tensor, mask: Optional[tf.Tensor] = None):
    """
    Args:
        inputs (tf.Tensor[ndims=2, int]): Tensor of values to mask
        mask (Optional[tf.Tensor[bool]]): Locations in the inputs that are
            valid (i.e. not padding, start tokens, etc.)

    Returns:
        masked_inputs (tf.Tensor[ndims=2, int]): Tensor of masked values
        bert_mask: Locations in the input that were masked
    """
    random_mask = self._generate_bert_mask(inputs)

    if mask is not None:
        random_mask &= mask

    masked_inputs = inputs * tf.cast(~random_mask, inputs.dtype)

    random_mask = tf.cast(random_mask, inputs.dtype)
    masked_inputs += K.random_uniform(
        K.shape(random_mask), 0, self.n_symbols, dtype=inputs.dtype) * random_mask

    return masked_inputs
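This variant is simpler than Example #4: every selected position is overwritten with a uniformly drawn symbol id, using multiply-by-mask arithmetic to splice the random values into the integer tensor. A condensed sketch with hypothetical token ids and vocabulary size:

import tensorflow as tf
import tensorflow.keras.backend as K

inputs = tf.constant([[5, 7, 2, 9]], dtype=tf.int32)  # hypothetical token ids
n_symbols = 25                                        # hypothetical vocabulary size

random_mask = K.random_uniform(K.shape(inputs)) < 0.25
keep = tf.cast(~random_mask, inputs.dtype)
noise = tf.cast(random_mask, inputs.dtype)

# Zero out the masked positions, then add random ids in their place.
corrupted = inputs * keep + K.random_uniform(
    K.shape(inputs), 0, n_symbols, dtype=inputs.dtype) * noise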
Example #7
Source File: ttfs.py From snn_toolbox with MIT License
def softmax_activation(mem):
    """Softmax activation."""

    return k.cast(k.less_equal(k.random_uniform(k.shape(mem)),
                               k.softmax(mem)), k.floatx())
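A neuron here emits a spike whenever a fresh uniform draw falls at or below its softmax probability, so each output fires with probability equal to its softmax score. A small sketch of the same trick outside the class, with hypothetical membrane potentials:

import tensorflow as tf
import tensorflow.keras.backend as k

mem = tf.constant([[2.0, 1.0, 0.1]])  # hypothetical membrane potentials
p = k.softmax(mem)                    # per-neuron spike probabilities

# Each entry is 1.0 with probability equal to its softmax value.
spikes = k.cast(k.less_equal(k.random_uniform(k.shape(mem)), p), k.floatx())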
Example #8
Source File: ttfs_dyn_thresh.py From snn_toolbox with MIT License
def softmax_activation(mem):
    """Softmax activation."""

    return k.cast(k.less_equal(k.random_uniform(k.shape(mem)),
                               k.softmax(mem)), k.floatx())
Example #9
Source File: networks.py From brainstorm with MIT License
def call(self, inputs):
    if self.n_dims == 2:
        rand_flow = K.random_uniform(
            shape=tf.convert_to_tensor(
                [tf.shape(inputs)[0], tf.shape(inputs)[1],
                 tf.shape(inputs)[2], self.n_dims]),
            minval=-self.flow_amp,
            maxval=self.flow_amp, dtype='float32')
        rand_flow = tf.nn.depthwise_conv2d(
            rand_flow, self.blur_kernel,
            strides=[1] * (self.n_dims + 2), padding='SAME')
    elif self.n_dims == 3:
        rand_flow = K.random_uniform(
            shape=tf.convert_to_tensor(
                [tf.shape(inputs)[0], tf.shape(inputs)[1],
                 tf.shape(inputs)[2], tf.shape(inputs)[3], self.n_dims]),
            minval=-self.flow_amp,
            maxval=self.flow_amp, dtype='float32')

        # blur it here, then again later?
        rand_flow_list = tf.unstack(rand_flow, num=self.n_dims, axis=-1)
        flow_chans = []
        for c in range(self.n_dims):
            flow_chan = tf.nn.conv3d(
                tf.expand_dims(rand_flow_list[c], axis=-1),
                self.blur_kernel,
                strides=[1] * (self.n_dims + 2), padding='SAME')
            flow_chans.append(flow_chan[:, :, :, :, 0])
        rand_flow = tf.stack(flow_chans, axis=-1)

    rand_flow = tf.reshape(rand_flow, [-1] + list(self.flow_shape))
    return rand_flow
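The layer first fills a flow field with independent uniform displacements in [-flow_amp, flow_amp) and then low-pass filters it so neighboring pixels deform coherently. A minimal 2-D sketch of that generate-then-blur pattern, assuming a hypothetical 5x5 box-blur kernel in place of the layer's self.blur_kernel:

import tensorflow as tf
import tensorflow.keras.backend as K

batch, h, w, n_dims = 4, 64, 64, 2
flow_amp = 8.0

# Independent per-pixel displacements in [-flow_amp, flow_amp).
rand_flow = K.random_uniform(shape=[batch, h, w, n_dims],
                             minval=-flow_amp, maxval=flow_amp)

# Depthwise 5x5 box blur; kernel shape is
# [height, width, in_channels, channel_multiplier].
blur_kernel = tf.ones((5, 5, n_dims, 1)) / 25.0
smooth_flow = tf.nn.depthwise_conv2d(rand_flow, blur_kernel,
                                     strides=[1, 1, 1, 1], padding='SAME')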
Example #10
Source File: ttfs.py From snn_toolbox with MIT License
def init_membrane_potential(self, output_shape=None, mode='zero'):
    """Initialize membrane potential.

    Helpful to avoid transient response in the beginning of the
    simulation. Not needed when reset between frames is turned off,
    e.g. with a video data set.

    Parameters
    ----------

    output_shape: Optional[tuple]
        Output shape
    mode: str
        Initialization mode.

        - ``'uniform'``: Random numbers from uniform distribution in
          ``[-thr, thr]``.
        - ``'bias'``: Negative bias.
        - ``'zero'``: Zero (default).

    Returns
    -------

    init_mem: ndarray
        A tensor of ``self.output_shape`` (same as layer).
    """

    if output_shape is None:
        output_shape = self.output_shape

    if mode == 'uniform':
        init_mem = k.random_uniform(output_shape,
                                    -self._v_thresh, self._v_thresh)
    elif mode == 'bias':
        init_mem = np.zeros(output_shape, k.floatx())
        if hasattr(self, 'bias'):
            bias = self.get_weights()[1]
            for i in range(len(bias)):
                # Todo: This assumes data_format = 'channels_first'
                init_mem[:, i, Ellipsis] = bias[i]
            self.add_update([(self.bias, np.zeros_like(bias))])
    else:  # mode == 'zero'
        init_mem = np.zeros(output_shape, k.floatx())

    return init_mem
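For mode='uniform' the method reduces to a single K.random_uniform call that spreads the initial potentials across [-v_thresh, v_thresh), which damps the transient at the start of a simulation. A minimal sketch with a hypothetical threshold and output shape:

import tensorflow.keras.backend as k

v_thresh = 1.0            # hypothetical spiking threshold
output_shape = (32, 10)   # hypothetical layer output shape

# Initial membrane potentials uniformly spread below threshold.
init_mem = k.random_uniform(output_shape, -v_thresh, v_thresh)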
Example #11
Source File: ttfs_dyn_thresh.py From snn_toolbox with MIT License
def init_membrane_potential(self, output_shape=None, mode='zero'):
    """Initialize membrane potential.

    Helpful to avoid transient response in the beginning of the
    simulation. Not needed when reset between frames is turned off,
    e.g. with a video data set.

    Parameters
    ----------

    output_shape: Optional[tuple]
        Output shape
    mode: str
        Initialization mode.

        - ``'uniform'``: Random numbers from uniform distribution in
          ``[-thr, thr]``.
        - ``'bias'``: Negative bias.
        - ``'zero'``: Zero (default).

    Returns
    -------

    init_mem: ndarray
        A tensor of ``self.output_shape`` (same as layer).
    """

    if output_shape is None:
        output_shape = self.output_shape

    if mode == 'uniform':
        init_mem = k.random_uniform(output_shape,
                                    -self._v_thresh, self._v_thresh)
    elif mode == 'bias':
        init_mem = np.zeros(output_shape, k.floatx())
        if hasattr(self, 'b'):
            b = self.get_weights()[1]
            for i in range(len(b)):
                init_mem[:, i, Ellipsis] = -b[i]
    else:  # mode == 'zero'
        init_mem = np.zeros(output_shape, k.floatx())

    return init_mem
Example #12
Source File: ttfs_corrective.py From snn_toolbox with MIT License
def init_membrane_potential(self, output_shape=None, mode='zero'):
    """Initialize membrane potential.

    Helpful to avoid transient response in the beginning of the
    simulation. Not needed when reset between frames is turned off,
    e.g. with a video data set.

    Parameters
    ----------

    output_shape: Optional[tuple]
        Output shape
    mode: str
        Initialization mode.

        - ``'uniform'``: Random numbers from uniform distribution in
          ``[-thr, thr]``.
        - ``'bias'``: Negative bias.
        - ``'zero'``: Zero (default).

    Returns
    -------

    init_mem: ndarray
        A tensor of ``self.output_shape`` (same as layer).
    """

    if output_shape is None:
        output_shape = self.output_shape

    if mode == 'uniform':
        init_mem = k.random_uniform(output_shape,
                                    -self._v_thresh, self._v_thresh)
    elif mode == 'bias':
        init_mem = np.zeros(output_shape, k.floatx())
        if hasattr(self, 'b'):
            b = self.get_weights()[1]
            for i in range(len(b)):
                init_mem[:, i, Ellipsis] = -b[i]
    else:  # mode == 'zero'
        init_mem = np.zeros(output_shape, k.floatx())

    return init_mem