Python tensorflow.Variables() Examples
The following are 30 code examples of tensorflow.Variables().
Example #1
Source File: natural_grad.py From rltf with MIT License
def _fisher_vector_product(self, mean_kl: tf.Tensor, var_list: list) -> Callable:
    """Get a function that computes the product of the KL Hessian and some vector v.
    Use the fact that Hv = d^2 L / dt^2 v = d/dt (dL/dt) v = d/dt gv

    Args:
        mean_kl: tf.Tensor. The KL divergence between the old and the new policy
        var_list: list of tf.Variables for which to compute gradients
    Returns:
        lambda, which takes as input a vector v and computes the product Hv
    """
    # Compute the gradients of the KL divergence w.r.t. var_list and flatten them
    grads = tf.gradients(mean_kl, var_list)
    grad = tf_cg.flatten_tensors(grads)  # shape: [None]

    def compute_hvp(v):
        # Compute the dot product between grad and v
        v = tf.stop_gradient(v)
        gvp = tf.reduce_sum(grad * v)
        # Compute the matrix-vector product `Hv` between the Hessian and v, and flatten it
        hvps = tf.gradients(gvp, var_list)
        hvp = tf_cg.flatten_tensors(hvps)
        hvp = tf.check_numerics(hvp, message="Invalid Fisher-vector product")
        return hvp

    return compute_hvp
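The core trick here, differentiating the gradient-vector dot product instead of ever forming the Hessian, can be checked standalone. Below is a minimal sketch in TensorFlow 1.x graph mode on a toy scalar loss; `w` and `loss` are illustrative stand-ins for `var_list` and `mean_kl`, and the concat mimics `tf_cg.flatten_tensors`:

import tensorflow as tf

w = tf.Variable([1.0, 2.0], name="w")
loss = tf.reduce_sum(w ** 2) ** 2  # toy scalar loss, twice differentiable

grads = tf.gradients(loss, [w])
grad = tf.concat([tf.reshape(g, [-1]) for g in grads], axis=0)  # flatten

v = tf.placeholder(tf.float32, shape=[2])
gvp = tf.reduce_sum(grad * tf.stop_gradient(v))  # g . v
hvp = tf.gradients(gvp, [w])[0]                  # H v, Hessian never materialized

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(hvp, feed_dict={v: [1.0, 0.0]}))  # [28. 16.], the first column of H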
Example #2
Source File: q1_classifier.py From CS224n with GNU General Public License v3.0
def add_prediction_op(self):
    """Adds the core transformation for this model which transforms a batch of input
    data into a batch of predictions. In this case, the transformation is a linear
    layer plus a softmax transformation:

    y = softmax(Wx + b)

    Hint: Make sure to create tf.Variables as needed.
    Hint: For this simple use-case, it's sufficient to initialize both weights W
          and biases b with zeros.

    Args:
        input_data: A tensor of shape (batch_size, n_features).
    Returns:
        pred: A tensor of shape (batch_size, n_classes)
    """
    ### YOUR CODE HERE
    with tf.variable_scope("transformation"):
        bias = tf.Variable(tf.random_uniform([self.config.n_classes]))
        W = tf.Variable(tf.random_uniform([self.config.n_features, self.config.n_classes]))
        z = tf.matmul(self.input_placeholder, W) + bias
    pred = softmax(z)
    ### END YOUR CODE
    return pred
Example #3
Source File: tf_utils.py From energy-py with MIT License
def get_tf_params(scope):
    """Makes a list of all tf.Variables under this scope

    args
        scope (str)

    returns
        params (list)
    """
    # find scope parameters
    params = [p for p in tf.trainable_variables() if p.name.startswith(scope)]

    # sort parameters list by the variable name
    return sorted(params, key=lambda var: var.name)
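A minimal usage sketch for the helper above (TensorFlow 1.x graph mode; the scope and variable names are illustrative):

import tensorflow as tf

with tf.variable_scope("online"):
    tf.get_variable("w", shape=[4, 2])
    tf.get_variable("b", shape=[2])

params = get_tf_params("online")
print([p.name for p in params])  # ['online/b:0', 'online/w:0'], sorted by name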
Example #4
Source File: utils.py From RFHO with MIT License
def var_list(self, mode=VlMode.RAW):
    """Get the chunks that define this variable.

    :param mode: (optional, default VlMode.RAW)
        VlMode.RAW: returns simply var_list, which may contain tf.Variables or MergedVariables
        VlMode.BASE: returns a list of tf.Variables that are the "base" variables that form
                     this MergedVariable
        VlMode.TENSOR: returns a list of tf.Variables or tf.Tensor from the MergedVariables
    :return: A list that may contain tf.Tensors, tf.Variables and/or MergedVariables
    """
    if mode == VlMode.RAW:
        return self._var_list
    elif mode == VlMode.BASE:
        return self._get_base_variable_list()
    elif mode == VlMode.TENSOR:
        return self._var_list_as_tensors()  # return a unique tensor + augmented copies
    else:
        raise NotImplementedError('mode %d does not exist' % mode)
Example #5
Source File: tfutil.py From transparent_latent_gan with MIT License
def create_session(config_dict=dict(), force_as_default=False):
    config = tf.ConfigProto()
    for key, value in config_dict.items():
        fields = key.split('.')
        obj = config
        for field in fields[:-1]:
            obj = getattr(obj, field)
        setattr(obj, fields[-1], value)
    session = tf.Session(config=config)
    if force_as_default:
        session._default_session = session.as_default()
        session._default_session.enforce_nesting = False
        session._default_session.__enter__()
    return session

#----------------------------------------------------------------------------
# Initialize all tf.Variables that have not already been initialized.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tf.variables_initializer(tf.report_uninitialized_variables()).run()
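A hedged usage sketch: the dotted keys in config_dict address nested ConfigProto fields, so, assuming this tfutil module is in scope, enabling GPU memory growth looks like this:

session = create_session({"gpu_options.allow_growth": True},  # nested field via dotted key
                         force_as_default=True)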
Example #6
Source File: tfutil.py From transparent_latent_gan with MIT License
def init_uninited_vars(vars=None):
    if vars is None:
        vars = tf.global_variables()
    test_vars = []
    test_ops = []
    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
Example #7
Source File: tfutil.py From higan with MIT License
def set_vars(var_to_value_dict: dict) -> None:
    """Set the values of given tf.Variables.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
        tflib.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
    """
    assert_tf_initialized()
    ops = []
    feed_dict = {}
    for var, value in var_to_value_dict.items():
        assert is_tf_expression(var)
        try:
            setter = tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/setter:0"))  # look for existing op
        except KeyError:
            with absolute_name_scope(var.name.split(":")[0]):
                with tf.control_dependencies(None):  # ignore surrounding control_dependencies
                    setter = tf.assign(var, tf.placeholder(var.dtype, var.shape, "new_value"), name="setter")  # create new setter
        ops.append(setter)
        feed_dict[setter.op.inputs[1]] = value
    run(ops, feed_dict)
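A usage sketch under stated assumptions: a default session exists and the module's run() and init_uninited_vars helpers are available. The variable name is illustrative:

import numpy as np
import tensorflow as tf

w = tf.Variable(tf.zeros([2, 2]), name="w")
init_uninited_vars([w])         # companion helper shown elsewhere on this page
set_vars({w: np.ones([2, 2])})  # reuses a cached `setter` op; no new graph nodes per call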
Example #8
Source File: tfutil.py From higan with MIT License
def create_session(config_dict=dict(), force_as_default=False):
    config = tf.ConfigProto()
    for key, value in config_dict.items():
        fields = key.split('.')
        obj = config
        for field in fields[:-1]:
            obj = getattr(obj, field)
        setattr(obj, fields[-1], value)
    session = tf.Session(config=config)
    if force_as_default:
        session._default_session = session.as_default()
        session._default_session.enforce_nesting = False
        session._default_session.__enter__()
    return session

#----------------------------------------------------------------------------
# Initialize all tf.Variables that have not already been initialized.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tf.variables_initializer(tf.report_uninitialized_variables()).run()
Example #9
Source File: tfutil.py From higan with MIT License
def init_uninited_vars(vars=None):
    if vars is None:
        vars = tf.global_variables()
    test_vars = []
    test_ops = []
    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
Example #10
Source File: tfutil.py From disentangling_conditional_gans with MIT License
def init_uninited_vars(vars=None):
    if vars is None:
        vars = tf.global_variables()
    test_vars = []
    test_ops = []
    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
Example #11
Source File: module.py From hub with Apache License 2.0
def variable_map(self):
    """Map from original variable names into tf.Variables (or lists of them).

    This map translates between variable names relative to the module and the
    corresponding Variable objects that have been created by instantiating it
    in the current graph (with the applicable scoping added).

    Each key in the map is a variable name as created by running the module's
    defining `module_fn` in the root scope of an empty graph. Each value in the
    map is a Variable object, or in case of partitioned variables a list of
    Variable objects.

    This property can be used with `tf.init_from_checkpoint` as `assignment_map`
    in order to restore a pre-trained checkpoint into a Module before calling
    `Module.export()`.

    Returns:
        A dict from the variable names in the Module to the instantiated
        tf.Variables or list of tf.Variables (if partitioned). The keys of this
        map are the same regardless of the scope of where the Module was
        instantiated.
    """
    return self._impl.variable_map
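As the docstring says, variable_map is meant to serve as an assignment_map for checkpoint restoration. A sketch with an example module handle and a placeholder checkpoint path:

import tensorflow as tf
import tensorflow_hub as hub

m = hub.Module("https://tfhub.dev/google/nnlm-en-dim128/1", trainable=True)
# Keys are module-relative names; values are the instantiated tf.Variables.
tf.train.init_from_checkpoint("/path/to/pretrained.ckpt", m.variable_map)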
Example #12
Source File: utils.py From g-tensorflow-models with Apache License 2.0
@contextlib.contextmanager  # required: the function yields and is used as a `with` block
def assert_no_new_variables():
    """Ensure that no tf.Variables are constructed inside the context.

    Yields:
        None
    Raises:
        ValueError: if there is a variable created.
    """
    num_vars = len(tf.global_variables())
    old_variables = tf.global_variables()
    yield
    if len(tf.global_variables()) != num_vars:
        new_vars = set(tf.global_variables()) - set(old_variables)
        tf.logging.error("NEW VARIABLES CREATED")
        tf.logging.error(10 * "=")
        for v in new_vars:
            tf.logging.error(v)
        raise ValueError("Variables created inside an "
                         "assert_no_new_variables context")
    if old_variables != tf.global_variables():
        raise ValueError("Variables somehow changed inside an "
                         "assert_no_new_variables context. "
                         "This means something modified the tf.global_variables()")
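A usage sketch (this relies on the contextlib.contextmanager decorator noted above):

import tensorflow as tf

w = tf.Variable(tf.zeros([3, 2]))      # created before entering the guard
with assert_no_new_variables():
    y = tf.matmul(tf.ones([1, 3]), w)  # fine: new ops are allowed, new Variables are not
    # tf.Variable(tf.zeros([1]))       # uncommenting this would raise ValueError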
Example #13
Source File: utils.py From models with Apache License 2.0
@contextlib.contextmanager  # required: the function yields and is used as a `with` block
def assert_no_new_variables():
    """Ensure that no tf.Variables are constructed inside the context.

    Yields:
        None
    Raises:
        ValueError: if there is a variable created.
    """
    num_vars = len(tf.global_variables())
    old_variables = tf.global_variables()
    yield
    if len(tf.global_variables()) != num_vars:
        new_vars = set(tf.global_variables()) - set(old_variables)
        tf.logging.error("NEW VARIABLES CREATED")
        tf.logging.error(10 * "=")
        for v in new_vars:
            tf.logging.error(v)
        raise ValueError("Variables created inside an "
                         "assert_no_new_variables context")
    if old_variables != tf.global_variables():
        raise ValueError("Variables somehow changed inside an "
                         "assert_no_new_variables context. "
                         "This means something modified the tf.global_variables()")
Example #14
Source File: tfutil.py From interfacegan with MIT License
def create_session(config_dict=dict(), force_as_default=False):
    config = tf.ConfigProto()
    for key, value in config_dict.items():
        fields = key.split('.')
        obj = config
        for field in fields[:-1]:
            obj = getattr(obj, field)
        setattr(obj, fields[-1], value)
    session = tf.Session(config=config)
    if force_as_default:
        session._default_session = session.as_default()
        session._default_session.enforce_nesting = False
        session._default_session.__enter__()
    return session

#----------------------------------------------------------------------------
# Initialize all tf.Variables that have not already been initialized.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tf.variables_initializer(tf.report_uninitialized_variables()).run()
Example #15
Source File: tfutil.py From interfacegan with MIT License
def init_uninited_vars(vars=None):
    if vars is None:
        vars = tf.global_variables()
    test_vars = []
    test_ops = []
    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
Example #16
Source File: tfutil.py From interfacegan with MIT License
def set_vars(var_to_value_dict: dict) -> None:
    """Set the values of given tf.Variables.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
        tflib.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
    """
    assert_tf_initialized()
    ops = []
    feed_dict = {}
    for var, value in var_to_value_dict.items():
        assert is_tf_expression(var)
        try:
            setter = tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/setter:0"))  # look for existing op
        except KeyError:
            with absolute_name_scope(var.name.split(":")[0]):
                with tf.control_dependencies(None):  # ignore surrounding control_dependencies
                    setter = tf.assign(var, tf.placeholder(var.dtype, var.shape, "new_value"), name="setter")  # create new setter
        ops.append(setter)
        feed_dict[setter.op.inputs[1]] = value
    run(ops, feed_dict)
Example #17
Source File: utils.py From multilabel-image-classification-tensorflow with MIT License
@contextlib.contextmanager  # required: the function yields and is used as a `with` block
def assert_no_new_variables():
    """Ensure that no tf.Variables are constructed inside the context.

    Yields:
        None
    Raises:
        ValueError: if there is a variable created.
    """
    num_vars = len(tf.global_variables())
    old_variables = tf.global_variables()
    yield
    if len(tf.global_variables()) != num_vars:
        new_vars = set(tf.global_variables()) - set(old_variables)
        tf.logging.error("NEW VARIABLES CREATED")
        tf.logging.error(10 * "=")
        for v in new_vars:
            tf.logging.error(v)
        raise ValueError("Variables created inside an "
                         "assert_no_new_variables context")
    if old_variables != tf.global_variables():
        raise ValueError("Variables somehow changed inside an "
                         "assert_no_new_variables context. "
                         "This means something modified the tf.global_variables()")
Example #18
Source File: tfutil.py From disentangling_conditional_gans with MIT License
def create_session(config_dict=dict(), force_as_default=False):
    config = tf.ConfigProto()
    for key, value in config_dict.items():
        fields = key.split('.')
        obj = config
        for field in fields[:-1]:
            obj = getattr(obj, field)
        setattr(obj, fields[-1], value)
    session = tf.Session(config=config)
    if force_as_default:
        session._default_session = session.as_default()
        session._default_session.enforce_nesting = False
        session._default_session.__enter__()
    return session

#----------------------------------------------------------------------------
# Initialize all tf.Variables that have not already been initialized.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tf.variables_initializer(tf.report_uninitialized_variables()).run()
Example #19
Source File: tfutil.py From tileGAN with GNU General Public License v3.0
def init_uninited_vars(vars=None):
    if vars is None:
        vars = tf.global_variables()
    test_vars = []
    test_ops = []
    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
Example #20
Source File: tfutil.py From tileGAN with GNU General Public License v3.0
def create_session(config_dict=dict(), force_as_default=False):
    config = tf.ConfigProto(allow_soft_placement=True)
    for key, value in config_dict.items():
        fields = key.split('.')
        obj = config
        for field in fields[:-1]:
            obj = getattr(obj, field)
        setattr(obj, fields[-1], value)
    session = tf.Session(config=config)
    if force_as_default:
        session._default_session = session.as_default()
        session._default_session.enforce_nesting = False
        session._default_session.__enter__()
    return session

#----------------------------------------------------------------------------
# Initialize all tf.Variables that have not already been initialized.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
#   tf.variables_initializer(tf.report_uninitialized_variables()).run()
Example #21
Source File: tfutil.py From ai-platform with MIT License
def set_vars(var_to_value_dict: dict) -> None:
    """Set the values of given tf.Variables.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
        tflib.run([tf.assign(var, value) for var, value in var_to_value_dict.items()])
    """
    assert_tf_initialized()
    ops = []
    feed_dict = {}
    for var, value in var_to_value_dict.items():
        assert is_tf_expression(var)
        try:
            setter = tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/setter:0"))  # look for existing op
        except KeyError:
            with absolute_name_scope(var.name.split(":")[0]):
                with tf.control_dependencies(None):  # ignore surrounding control_dependencies
                    setter = tf.assign(var, tf.placeholder(var.dtype, var.shape, "new_value"), name="setter")  # create new setter
        ops.append(setter)
        feed_dict[setter.op.inputs[1]] = value
    run(ops, feed_dict)
Example #22
Source File: utils.py From Gun-Detector with Apache License 2.0
@contextlib.contextmanager  # required: the function yields and is used as a `with` block
def assert_no_new_variables():
    """Ensure that no tf.Variables are constructed inside the context.

    Yields:
        None
    Raises:
        ValueError: if there is a variable created.
    """
    num_vars = len(tf.global_variables())
    old_variables = tf.global_variables()
    yield
    if len(tf.global_variables()) != num_vars:
        new_vars = set(tf.global_variables()) - set(old_variables)
        tf.logging.error("NEW VARIABLES CREATED")
        tf.logging.error(10 * "=")
        for v in new_vars:
            tf.logging.error(v)
        raise ValueError("Variables created inside an "
                         "assert_no_new_variables context")
    if old_variables != tf.global_variables():
        raise ValueError("Variables somehow changed inside an "
                         "assert_no_new_variables context. "
                         "This means something modified the tf.global_variables()")
Example #23
Source File: q1_classifier.py From cs224d with MIT License
def add_model(self, input_data):
    """Adds a linear-layer plus a softmax transformation

    The core transformation for this model which transforms a batch of input
    data into a batch of predictions. In this case, the mathematical
    transformation effected is

    y = softmax(xW + b)

    Hint: Make sure to create tf.Variables as needed. Also, make sure to use
          tf.name_scope to ensure that your name spaces are clean.
    Hint: For this simple use-case, it's sufficient to initialize both weights W
          and biases b with zeros.

    Args:
        input_data: A tensor of shape (batch_size, n_features).
    Returns:
        out: A tensor of shape (batch_size, n_classes)
    """
    with tf.variable_scope("softmax_model"):
        self.W = tf.Variable(tf.zeros([self.config.n_features, self.config.n_classes]),
                             name="weights")
        self.b = tf.Variable(tf.zeros([self.config.n_classes]), name="biases")
    return softmax(tf.matmul(input_data, self.W) + self.b)
Example #24
Source File: optimizers.py From kfac with Apache License 2.0
def _set_hyper(self, name, value):
    """Set hyper `name` to value. value must be numeric."""
    if self._hypers_created:
        if not isinstance(self._hyper[name], tf.Variable):
            raise AttributeError("Can't set attribute: {}".format(name))
        if not isinstance(value, numbers.Number):
            raise ValueError('Dynamic reassignment only supports setting with a '
                             'number. tf.Tensors and tf.Variables can only be used '
                             'before the internal kfac optimizer is created.')
        backend.set_value(self._hyper[name], value)
    else:
        super(Kfac, self)._set_hyper(name, value)
Example #25
Source File: misc.py From tfdiffeq with MIT License
def move_to_device(x, device):
    """Utility function to move a tensor to a device"""
    if device is None:
        return x

    # tf.Variables cannot be moved to a device
    if not isinstance(x, tf.Tensor):
        return x

    if isinstance(device, tf.Tensor):
        device = device.device

    # check if device is empty string
    if len(device) == 0:
        return x

    if '/' in device:
        device = device.replace('/', '')

    splits = device.split(':')[-2:]
    device, id = splits
    id = int(id)

    x_device = x.device.lower()

    if 'cpu' in device.lower() and 'cpu' not in x_device:
        x = tf.identity(x)
    elif 'gpu' in device.lower() and 'gpu' not in x_device:
        x = x.gpu(id)

    return x
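A usage sketch, assuming TensorFlow 1.x with eager execution enabled (the x.gpu(id) call above is an eager-tensor method) and at least one GPU present:

import tensorflow as tf
tf.enable_eager_execution()

x = tf.constant([1.0, 2.0])
y = move_to_device(x, '/gpu:0')  # copied to GPU 0 if x is not already there
z = move_to_device(x, None)      # returned unchanged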
Example #26
Source File: distributions.py From aboleth with Apache License 2.0
def norm_posterior(dim, std0, suffix=None):
    """Initialise a posterior (diagonal) Normal distribution.

    Parameters
    ----------
    dim : tuple or list
        the dimension of this distribution.
    std0 : float, np.array
        the initial (unoptimized) standard deviation of this distribution.
        Must be a scalar or have the same shape as dim.
    suffix : str
        suffix to add to the names of the variables of the parameters of this
        distribution.

    Returns
    -------
    Q : tf.distributions.Normal
        the initialised posterior Normal object.

    Note
    ----
    This will make tf.Variables on the mean and standard deviation of the
    posterior. The initialisation of the mean is zero and the initialisation
    of the standard deviation is simply ``std0`` for each element.
    """
    assert (np.ndim(std0) == 0) or (np.shape(std0) == dim)

    mu_0 = tf.zeros(dim)
    mu = tf.Variable(mu_0, name=_add_suffix("W_mu_q", suffix))

    if np.ndim(std0) == 0:
        std0 = tf.ones(dim) * std0
    std = pos_variable(std0, name=_add_suffix("W_std_q", suffix))

    summary_histogram(mu)
    summary_histogram(std)

    Q = tf.distributions.Normal(loc=mu, scale=std)
    return Q
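A hedged usage sketch, assuming aboleth's internal helpers (_add_suffix, pos_variable, summary_histogram) are in scope; the shape and suffix are illustrative:

Q = norm_posterior(dim=(5, 3), std0=0.1, suffix="layer1")
w_sample = Q.sample()  # reparameterised draw of shape (5, 3); gradients reach mu and std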
Example #27
Source File: tfutil.py From interfacegan with MIT License
def init_uninitialized_vars(target_vars: List[tf.Variable] = None) -> None:
    """Initialize all tf.Variables that have not already been initialized.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
        tf.variables_initializer(tf.report_uninitialized_variables()).run()
    """
    assert_tf_initialized()
    if target_vars is None:
        target_vars = tf.global_variables()

    test_vars = []
    test_ops = []

    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in target_vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/IsVariableInitialized:0"))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(":")[0]):
                    test_ops.append(tf.is_variable_initialized(var))

    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])
Example #28
Source File: tfutil.py From ai-platform with MIT License
def init_uninitialized_vars(target_vars: List[tf.Variable] = None) -> None:
    """Initialize all tf.Variables that have not already been initialized.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
        tf.variables_initializer(tf.report_uninitialized_variables()).run()
    """
    assert_tf_initialized()
    if target_vars is None:
        target_vars = tf.global_variables()

    test_vars = []
    test_ops = []

    with tf.control_dependencies(None):  # ignore surrounding control_dependencies
        for var in target_vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/IsVariableInitialized:0"))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(":")[0]):
                    test_ops.append(tf.is_variable_initialized(var))

    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])
Example #29
Source File: graph_builder.py From rlgraph with Apache License 2.0
def count_trainable_parameters():
    """
    Counts the number of trainable parameters (e.g. tf.Variables) to get a
    rough idea of how complex our Model is.

    Returns:
        int: The number of trainable parameters in the graph.
    """
    num_trainable_parameters = 0
    if get_backend() == "tf":
        for variable in tf.trainable_variables():
            num_trainable_parameters += get_shape(variable, flat=True)

    return num_trainable_parameters
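Without the rlgraph backend helpers, the same count can be sketched directly on a plain TF1 graph:

import numpy as np
import tensorflow as tf

tf.get_variable("w", shape=[10, 4])
tf.get_variable("b", shape=[4])
n = sum(int(np.prod(v.get_shape().as_list())) for v in tf.trainable_variables())
print(n)  # 44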
Example #30
Source File: module.py From hub with Apache License 2.0
def variables(self):
    """Returns the list of all tf.Variables created by module instantiation."""
    result = []
    for _, value in sorted(self.variable_map.items()):
        if isinstance(value, list):
            result.extend(value)
        else:
            result.append(value)
    return result