Python tensorflow.python.ops.init_ops.random_uniform_initializer() Examples
The following are 15 code examples of tensorflow.python.ops.init_ops.random_uniform_initializer(), drawn from open-source projects. Each example lists its source file, the project it comes from, and that project's license. You may also want to check out all available functions and classes of the module tensorflow.python.ops.init_ops.
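
Before the examples, a quick orientation: init_ops.random_uniform_initializer(minval, maxval) returns an initializer object that, when called with a shape, yields a tensor of values drawn uniformly from [minval, maxval). The sketch below is a minimal, illustrative usage in TF 1.x graph mode (the variable name "w", the bounds, and the seed are arbitrary choices, not from the examples); the module aliases (vs, ops, math_ops, and so on) match the import conventions that the excerpts below assume but do not show:

import math

import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope as vs

# Sketch: initialize a 3x4 weight matrix with values drawn uniformly
# from [-0.05, 0.05); the seed makes the draw repeatable.
init = init_ops.random_uniform_initializer(minval=-0.05, maxval=0.05, seed=42)
w = vs.get_variable("w", shape=[3, 4], initializer=init)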
Example #1
Source File: core_rnn_cell_impl.py From auto-alt-text-lambda-api with MIT License
def __call__(self, inputs, state, scope=None):
  """Run the cell on embedded inputs."""
  with vs.variable_scope(scope or "embedding_wrapper"):  # "EmbeddingWrapper"
    with ops.device("/cpu:0"):
      if self._initializer:
        initializer = self._initializer
      elif vs.get_variable_scope().initializer:
        initializer = vs.get_variable_scope().initializer
      else:
        # Default initializer for embeddings should have variance=1.
        sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
        initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
      if type(state) is tuple:
        data_type = state[0].dtype
      else:
        data_type = state.dtype
      embedding = vs.get_variable(
          "embedding", [self._embedding_classes, self._embedding_size],
          initializer=initializer, dtype=data_type)
      embedded = embedding_ops.embedding_lookup(
          embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
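
Aside: the comment in the default branch checks out. A uniform distribution on [a, b] has variance (b − a)²/12, so Uniform(−√3, √3) has variance (2√3)²/12 = 12/12 = 1, which is why √3 is the bound that gives unit-variance embeddings.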
Example #2
Source File: core_rnn_cell.py From lambda-packs with MIT License
def call(self, inputs, state):
  """Run the cell on embedded inputs."""
  with ops.device("/cpu:0"):
    if self._initializer:
      initializer = self._initializer
    elif vs.get_variable_scope().initializer:
      initializer = vs.get_variable_scope().initializer
    else:
      # Default initializer for embeddings should have variance=1.
      sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
      initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
    if isinstance(state, tuple):
      data_type = state[0].dtype
    else:
      data_type = state.dtype
    embedding = vs.get_variable(
        "embedding", [self._embedding_classes, self._embedding_size],
        initializer=initializer, dtype=data_type)
    embedded = embedding_ops.embedding_lookup(
        embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
Example #3
Source File: core_rnn_cell.py From Multiview2Novelview with MIT License
def call(self, inputs, state):
  """Run the cell on embedded inputs."""
  with ops.device("/cpu:0"):
    if self._initializer:
      initializer = self._initializer
    elif vs.get_variable_scope().initializer:
      initializer = vs.get_variable_scope().initializer
    else:
      # Default initializer for embeddings should have variance=1.
      sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
      initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
    if isinstance(state, tuple):
      data_type = state[0].dtype
    else:
      data_type = state.dtype
    embedding = vs.get_variable(
        "embedding", [self._embedding_classes, self._embedding_size],
        initializer=initializer, dtype=data_type)
    embedded = embedding_ops.embedding_lookup(
        embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
Example #4
Source File: rnn_cell.py From ROLO with Apache License 2.0
def __call__(self, inputs, state, scope=None):
  """Run the cell on embedded inputs."""
  with vs.variable_scope(scope or type(self).__name__):  # "EmbeddingWrapper"
    with ops.device("/cpu:0"):
      if self._initializer:
        initializer = self._initializer
      elif vs.get_variable_scope().initializer:
        initializer = vs.get_variable_scope().initializer
      else:
        # Default initializer for embeddings should have variance=1.
        sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
        initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
      if type(state) is tuple:
        data_type = state[0].dtype
      else:
        data_type = state.dtype
      embedding = vs.get_variable(
          "embedding", [self._embedding_classes, self._embedding_size],
          initializer=initializer, dtype=data_type)
      embedded = embedding_ops.embedding_lookup(
          embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
Example #5
Source File: rnn_cell.py From deep_image_model with Apache License 2.0
def __call__(self, inputs, state, scope=None):
  """Run the cell on embedded inputs."""
  with vs.variable_scope(scope or type(self).__name__):  # "EmbeddingWrapper"
    with ops.device("/cpu:0"):
      if self._initializer:
        initializer = self._initializer
      elif vs.get_variable_scope().initializer:
        initializer = vs.get_variable_scope().initializer
      else:
        # Default initializer for embeddings should have variance=1.
        sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
        initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
      if type(state) is tuple:
        data_type = state[0].dtype
      else:
        data_type = state.dtype
      embedding = vs.get_variable(
          "embedding", [self._embedding_classes, self._embedding_size],
          initializer=initializer, dtype=data_type)
      embedded = embedding_ops.embedding_lookup(
          embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
Example #6
Source File: rnn_cell.py From ecm with Apache License 2.0
def __call__(self, inputs, state, scope=None):
  """Run the cell on embedded inputs."""
  with vs.variable_scope(scope or type(self).__name__):  # "EmbeddingWrapper"
    with ops.device("/cpu:0"):
      if self._initializer:
        initializer = self._initializer
      elif vs.get_variable_scope().initializer:
        initializer = vs.get_variable_scope().initializer
      else:
        # Default initializer for embeddings should have variance=1.
        sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
        initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
      if type(state) is tuple:
        data_type = state[0].dtype
      else:
        data_type = state.dtype
      embedding = vs.get_variable(
          "embedding", [self._embedding_classes, self._embedding_size],
          initializer=initializer, dtype=data_type)
      embedded = embedding_ops.embedding_lookup(
          embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
Example #7
Source File: mod_core_rnn_cell_impl.py From RGAN with MIT License
def __call__(self, inputs, state, scope=None):
  """Run the cell on embedded inputs."""
  with _checked_scope(self, scope or "embedding_wrapper", reuse=self._reuse):
    with ops.device("/cpu:0"):
      if self._initializer:
        initializer = self._initializer
      elif vs.get_variable_scope().initializer:
        initializer = vs.get_variable_scope().initializer
      else:
        # Default initializer for embeddings should have variance=1.
        sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
        initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
      if type(state) is tuple:
        data_type = state[0].dtype
      else:
        data_type = state.dtype
      embedding = vs.get_variable(
          "embedding", [self._embedding_classes, self._embedding_size],
          initializer=initializer, dtype=data_type)
      embedded = embedding_ops.embedding_lookup(
          embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
Example #8
Source File: embedding.py From NJUNMT-tf with Apache License 2.0
def _build(self):
  """Builds the embedding table, plus a position embedding table
  when timing == "emb".
  """
  self._embeddings = variable_scope.get_variable(
      name=(self._name or "embedding_table"),
      shape=[self._vocab_size, self._dimension],
      initializer=init_ops.random_uniform_initializer(
          -self._init_scale, self._init_scale))
  if self._timing == "emb":
    self._position_embedding = variable_scope.get_variable(
        name=(self._name or "embedding_table") + "_posi",
        shape=[self._maximum_position, self._dimension],
        initializer=init_ops.random_uniform_initializer(
            -self._init_scale, self._init_scale))
Example #9
Source File: EUNN.py From AmusingPythonCodes with MIT License
def __call__(self, inputs, state, scope=None):
  with vs.variable_scope(scope or "eunn_cell"):
    state = _eunn_loop(state, self._capacity, self.diag_vec, self.off_vec,
                       self.diag, self._fft)
    input_matrix_init = init_ops.random_uniform_initializer(-0.01, 0.01)
    if self._comp:
      input_matrix_re = vs.get_variable(
          "U_re", [inputs.get_shape()[-1], self._hidden_size],
          initializer=input_matrix_init)
      input_matrix_im = vs.get_variable(
          "U_im", [inputs.get_shape()[-1], self._hidden_size],
          initializer=input_matrix_init)
      inputs_re = math_ops.matmul(inputs, input_matrix_re)
      inputs_im = math_ops.matmul(inputs, input_matrix_im)
      inputs = math_ops.complex(inputs_re, inputs_im)
    else:
      input_matrix = vs.get_variable(
          "U", [inputs.get_shape()[-1], self._hidden_size],
          initializer=input_matrix_init)
      inputs = math_ops.matmul(inputs, input_matrix)
    bias = vs.get_variable("modReLUBias", [self._hidden_size],
                           initializer=init_ops.constant_initializer())
    output = self._activation((inputs + state), bias, self._comp)
  return output, output
Example #10
Source File: core_rnn_cell_impl.py From keras-lambda with MIT License
def __call__(self, inputs, state, scope=None):
  """Run the cell on embedded inputs."""
  with vs.variable_scope(scope or "embedding_wrapper"):  # "EmbeddingWrapper"
    with ops.device("/cpu:0"):
      if self._initializer:
        initializer = self._initializer
      elif vs.get_variable_scope().initializer:
        initializer = vs.get_variable_scope().initializer
      else:
        # Default initializer for embeddings should have variance=1.
        sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
        initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
      if type(state) is tuple:
        data_type = state[0].dtype
      else:
        data_type = state.dtype
      embedding = vs.get_variable(
          "embedding", [self._embedding_classes, self._embedding_size],
          initializer=initializer, dtype=data_type)
      embedded = embedding_ops.embedding_lookup(
          embedding, array_ops.reshape(inputs, [-1]))
  return self._cell(embedded, state)
Example #11
Source File: backend.py From lambda-packs with MIT License
def random_uniform_variable(shape, low, high, dtype=None, name=None, seed=None):
  """Instantiates a variable with values drawn from a uniform distribution.

  Arguments:
      shape: Tuple of integers, shape of returned Keras variable.
      low: Float, lower boundary of the output interval.
      high: Float, upper boundary of the output interval.
      dtype: String, dtype of returned Keras variable.
      name: String, name of returned Keras variable.
      seed: Integer, random seed.

  Returns:
      A Keras variable, filled with drawn samples.

  Example:
  ```python
      # TensorFlow example
      >>> kvar = K.random_uniform_variable((2,3), 0, 1)
      >>> kvar
      <tensorflow.python.ops.variables.Variable object at 0x10ab40b10>
      >>> K.eval(kvar)
      array([[ 0.10940075,  0.10047495,  0.476143  ],
             [ 0.66137183,  0.00869417,  0.89220798]], dtype=float32)
  ```
  """
  if dtype is None:
    dtype = floatx()
  shape = tuple(map(int, shape))
  tf_dtype = _convert_string_dtype(dtype)
  if seed is None:
    # ensure that randomness is conditioned by the Numpy RNG
    seed = np.random.randint(10e8)
  value = init_ops.random_uniform_initializer(
      low, high, dtype=tf_dtype, seed=seed)(shape)
  return variable(value, dtype=dtype, name=name)
Example #12
Source File: GORU.py From rotational-unit-of-memory with MIT License
def __call__(self, inputs, state, scope=None):
  with vs.variable_scope(scope or "goru_cell"):
    U_init = init_ops.random_uniform_initializer(-0.01, 0.01)
    b_init = init_ops.constant_initializer(2.)
    mod_b_init = init_ops.constant_initializer(2.)

    U = vs.get_variable(
        "U", [inputs.get_shape()[-1], self._hidden_size * 3],
        dtype=tf.float32, initializer=U_init)
    Ux = math_ops.matmul(inputs, U)
    U_cx, U_rx, U_gx = array_ops.split(Ux, 3, axis=1)

    W_r = vs.get_variable(
        "W_r", [self._hidden_size, self._hidden_size],
        dtype=tf.float32, initializer=U_init)
    W_g = vs.get_variable(
        "W_g", [self._hidden_size, self._hidden_size],
        dtype=tf.float32, initializer=U_init)
    W_rh = math_ops.matmul(state, W_r)
    W_gh = math_ops.matmul(state, W_g)

    bias_r = vs.get_variable(
        "bias_r", [self._hidden_size], dtype=tf.float32, initializer=b_init)
    bias_g = vs.get_variable("bias_g", [self._hidden_size], dtype=tf.float32)
    bias_c = vs.get_variable(
        "bias_c", [self._hidden_size], dtype=tf.float32,
        initializer=mod_b_init)

    r_tmp = U_rx + W_rh + bias_r
    g_tmp = U_gx + W_gh + bias_g
    r = math_ops.sigmoid(r_tmp)
    g = math_ops.sigmoid(g_tmp)

    Unitaryh = _eunn_loop(state, self._capacity, self.diag_vec, self.off_vec,
                          self.diag, self._fft)
    c = modrelu(math_ops.multiply(r, Unitaryh) + U_cx, bias_c, False)
    new_state = math_ops.multiply(g, state) + math_ops.multiply(1 - g, c)
  return new_state, new_state
Example #13
Source File: EUNN.py From rotational-unit-of-memory with MIT License
def __call__(self, inputs, state, scope=None):
  with vs.variable_scope(scope or "eunn_cell"):
    state = _eunn_loop(state, self._capacity, self.diag_vec, self.off_vec,
                       self.diag, self._fft)
    input_matrix_init = init_ops.random_uniform_initializer(-0.01, 0.01)
    if self._comp:
      input_matrix_re = vs.get_variable(
          "U_re", [inputs.get_shape()[-1], self._hidden_size],
          initializer=input_matrix_init)
      input_matrix_im = vs.get_variable(
          "U_im", [inputs.get_shape()[-1], self._hidden_size],
          initializer=input_matrix_init)
      inputs_re = math_ops.matmul(inputs, input_matrix_re)
      inputs_im = math_ops.matmul(inputs, input_matrix_im)
      inputs = math_ops.complex(inputs_re, inputs_im)
    else:
      input_matrix = vs.get_variable(
          "U", [inputs.get_shape()[-1], self._hidden_size],
          initializer=input_matrix_init)
      inputs = math_ops.matmul(inputs, input_matrix)
    bias = vs.get_variable("modReLUBias", [self._hidden_size],
                           initializer=init_ops.constant_initializer())
    output = self._activation((inputs + state), bias, self._comp)
  return output, output
Example #14
Source File: GORU.py From AmusingPythonCodes with MIT License
def __call__(self, inputs, state, scope=None):
  with vs.variable_scope(scope or "goru_cell"):
    U_init = init_ops.random_uniform_initializer(-0.01, 0.01)
    b_init = init_ops.constant_initializer(2.)
    mod_b_init = init_ops.constant_initializer(0.01)

    U = vs.get_variable(
        "U", [inputs.get_shape()[-1], self._hidden_size * 3],
        dtype=tf.float32, initializer=U_init)
    Ux = math_ops.matmul(inputs, U)
    U_cx, U_rx, U_gx = array_ops.split(Ux, 3, axis=1)

    W_r = vs.get_variable(
        "W_r", [self._hidden_size, self._hidden_size],
        dtype=tf.float32, initializer=U_init)
    W_g = vs.get_variable(
        "W_g", [self._hidden_size, self._hidden_size],
        dtype=tf.float32, initializer=U_init)
    W_rh = math_ops.matmul(state, W_r)
    W_gh = math_ops.matmul(state, W_g)

    bias_r = vs.get_variable(
        "bias_r", [self._hidden_size], dtype=tf.float32, initializer=b_init)
    bias_g = vs.get_variable("bias_g", [self._hidden_size], dtype=tf.float32)
    bias_c = vs.get_variable(
        "bias_c", [self._hidden_size], dtype=tf.float32,
        initializer=mod_b_init)

    r_tmp = U_rx + W_rh + bias_r
    g_tmp = U_gx + W_gh + bias_g
    r = math_ops.sigmoid(r_tmp)
    g = math_ops.sigmoid(g_tmp)

    Unitaryh = _eunn_loop(state, self._capacity, self.diag_vec, self.off_vec,
                          self.diag, self._fft)
    c = modrelu(math_ops.multiply(r, Unitaryh) + U_cx, bias_c, False)
    new_state = math_ops.multiply(g, state) + math_ops.multiply(1 - g, c)
  return new_state, new_state
Example #15
Source File: backend.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def random_uniform_variable(shape, low, high, dtype=None, name=None, seed=None):
  """Instantiates a variable with values drawn from a uniform distribution.

  Arguments:
      shape: Tuple of integers, shape of returned Keras variable.
      low: Float, lower boundary of the output interval.
      high: Float, upper boundary of the output interval.
      dtype: String, dtype of returned Keras variable.
      name: String, name of returned Keras variable.
      seed: Integer, random seed.

  Returns:
      A Keras variable, filled with drawn samples.

  Example:
  ```python
      # TensorFlow example
      >>> kvar = K.random_uniform_variable((2,3), 0, 1)
      >>> kvar
      <tensorflow.python.ops.variables.Variable object at 0x10ab40b10>
      >>> K.eval(kvar)
      array([[ 0.10940075,  0.10047495,  0.476143  ],
             [ 0.66137183,  0.00869417,  0.89220798]], dtype=float32)
  ```
  """
  if dtype is None:
    dtype = floatx()
  tf_dtype = _convert_string_dtype(dtype)
  if seed is None:
    # ensure that randomness is conditioned by the Numpy RNG
    seed = np.random.randint(10e8)
  value = init_ops.random_uniform_initializer(
      low, high, dtype=tf_dtype, seed=seed)(shape)
  return variable(value, dtype=dtype, name=name)