Python tensorflow.local_variables() Examples
The following are 30 code examples of tensorflow.local_variables().
The original project and source file for each snippet are noted above the example.
You may also want to check out all available functions/classes of the module tensorflow.
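Before the project examples, here is a minimal, hypothetical sketch (not taken from any project listed below) of the most common use of tf.local_variables() in TensorFlow 1.x graph mode: tf.metrics ops create their accumulator variables in the GraphKeys.LOCAL_VARIABLES collection, and tf.local_variables() returns them so they can be initialized before the first update.

import tensorflow as tf

labels = tf.constant([1, 0, 1, 1])
predictions = tf.constant([1, 0, 0, 1])

# Creates two local variables (total and count) behind the scenes.
accuracy, update_op = tf.metrics.accuracy(labels, predictions)

with tf.Session() as sess:
    # tf.local_variables() lists the metric's accumulators; initialize them
    # explicitly (tf.local_variables_initializer() does the same thing).
    sess.run(tf.variables_initializer(tf.local_variables()))
    sess.run(update_op)
    print(sess.run(accuracy))  # 0.75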
Example #1
Source File: variable_mgr.py From dlcookbook-dlbs with Apache License 2.0 | 6 votes |
def savable_variables(self):
  """Returns a list/dict of savable variables to pass to tf.train.Saver."""
  params = {}
  for v in tf.global_variables():
    assert (v.name.startswith(variable_mgr_util.PS_SHADOW_VAR_PREFIX + '/v0/')
            or v.name in ('global_step:0', 'loss_scale:0',
                          'loss_scale_normal_steps:0')), (
                              'Invalid global variable: %s' % v)
    # We store variables in the checkpoint with the shadow variable prefix
    # removed so we can evaluate checkpoints in non-distributed replicated
    # mode. The checkpoints can also be loaded for training in
    # distributed_replicated mode.
    name = self._strip_port(self._remove_shadow_var_prefix_if_present(v.name))
    params[name] = v
  for v in tf.local_variables():
    # Non-trainable variables, such as batch norm moving averages, do not have
    # corresponding global shadow variables, so we add them here. Trainable
    # local variables have corresponding global shadow variables, which were
    # added in the global variable loop above.
    if v.name.startswith('v0/') and v not in tf.trainable_variables():
      params[self._strip_port(v.name)] = v
  return params
Example #2
Source File: variables.py From deep_image_model with Apache License 2.0 | 6 votes |
def local_variables():
  """Returns local variables.

  Local variables - per process variables, usually not saved/restored to
  checkpoint and used for temporary or intermediate values.
  For example, they can be used as counters for metrics computation or
  number of epochs this machine has read data.
  The `local_variable()` automatically adds new variable to
  `GraphKeys.LOCAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to local variables are global variables. See
  [`tf.global_variables()`](../../api_docs/python/state_ops.md#global_variables)

  Returns:
    A list of local `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES)
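As a quick illustration of the docstring above (a hypothetical sketch assuming TensorFlow 1.x, not part of this project), passing collections= when constructing a variable places it in the local collection that this function returns:

import tensorflow as tf

# A per-process counter kept out of checkpoints by putting it in LOCAL_VARIABLES.
epoch_counter = tf.Variable(
    0, name='epochs_read', trainable=False,
    collections=[tf.GraphKeys.LOCAL_VARIABLES])

assert epoch_counter in tf.local_variables()
assert epoch_counter not in tf.global_variables()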
Example #3
Source File: metric_ops_test.py From deep_image_model with Apache License 2.0 | 6 votes |
def _test_streaming_sparse_average_precision_at_k(
    self, predictions, labels, k, expected, weights=None):
  with tf.Graph().as_default() as g, self.test_session(g):
    if weights is not None:
      weights = tf.constant(weights, tf.float32)
    predictions = tf.constant(predictions, tf.float32)
    metric, update = metrics.streaming_sparse_average_precision_at_k(
        predictions, labels, k, weights=weights)

    # Fails without initialized vars.
    self.assertRaises(tf.OpError, metric.eval)
    self.assertRaises(tf.OpError, update.eval)
    local_variables = tf.local_variables()
    tf.initialize_variables(local_variables).run()

    # Run per-step op and assert expected values.
    if math.isnan(expected):
      _assert_nan(self, update.eval())
      _assert_nan(self, metric.eval())
    else:
      self.assertAlmostEqual(expected, update.eval())
      self.assertAlmostEqual(expected, metric.eval())
Example #4
Source File: variables.py From keras-lambda with MIT License | 6 votes |
def global_variables():
  """Returns global variables.

  Global variables are variables that are shared across machines in a
  distributed environment. The `Variable()` constructor or `get_variable()`
  automatically adds new variables to the graph collection
  `GraphKeys.GLOBAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to global variables are local variables. See
  [`tf.local_variables()`](../../api_docs/python/state_ops.md#local_variables)

  Returns:
    A list of `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
Example #5
Source File: variables.py From deep_image_model with Apache License 2.0 | 6 votes |
def global_variables():
  """Returns global variables.

  Global variables are variables that are shared across machines in a
  distributed environment. The `Variable()` constructor or `get_variable()`
  automatically adds new variables to the graph collection
  `GraphKeys.GLOBAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to global variables are local variables. See
  [`tf.local_variables()`](../../api_docs/python/state_ops.md#local_variables)

  Returns:
    A list of `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
Example #6
Source File: variables.py From keras-lambda with MIT License | 6 votes |
def local_variables():
  """Returns local variables.

  Local variables - per process variables, usually not saved/restored to
  checkpoint and used for temporary or intermediate values.
  For example, they can be used as counters for metrics computation or
  number of epochs this machine has read data.
  The `local_variable()` automatically adds new variable to
  `GraphKeys.LOCAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to local variables are global variables. See
  [`tf.global_variables()`](../../api_docs/python/state_ops.md#global_variables)

  Returns:
    A list of local `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES)
Example #7
Source File: variables.py From auto-alt-text-lambda-api with MIT License | 6 votes |
def local_variables():
  """Returns local variables.

  Local variables - per process variables, usually not saved/restored to
  checkpoint and used for temporary or intermediate values.
  For example, they can be used as counters for metrics computation or
  number of epochs this machine has read data.
  The `local_variable()` automatically adds new variable to
  `GraphKeys.LOCAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to local variables are global variables. See
  [`tf.global_variables()`](../../api_docs/python/state_ops.md#global_variables)

  Returns:
    A list of local `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES)
Example #8
Source File: variables.py From auto-alt-text-lambda-api with MIT License | 6 votes |
def global_variables():
  """Returns global variables.

  Global variables are variables that are shared across machines in a
  distributed environment. The `Variable()` constructor or `get_variable()`
  automatically adds new variables to the graph collection
  `GraphKeys.GLOBAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to global variables are local variables. See
  [`tf.local_variables()`](../../api_docs/python/state_ops.md#local_variables)

  Returns:
    A list of `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
Example #9
Source File: distributed.py From tensorpack with Apache License 2.0 | 6 votes |
def _get_initial_sync_op(self):
    """
    Get the op to copy-initialized all local variables from PS.
    """
    def strip_port(s):
        if s.endswith(':0'):
            return s[:-2]
        return s

    local_vars = tf.local_variables()
    local_var_by_name = {strip_port(v.name): v for v in local_vars}
    ops = []
    nr_shadow_vars = len(self._shadow_vars)
    for v in self._shadow_vars:
        vname = strip_port(v.name)
        for i in range(self.nr_gpu):
            name = 'tower%s/%s' % (i, vname)
            assert name in local_var_by_name, \
                "Shadow variable {} doesn't match a corresponding local variable!".format(v.name)
            copy_to = local_var_by_name[name]
            # logger.info("{} -> {}".format(v.name, copy_to.name))
            ops.append(copy_to.assign(v.read_value()))
    return tf.group(*ops, name='sync_{}_variables_from_ps'.format(nr_shadow_vars))
Example #10
Source File: test_core_utils.py From lm-human-preferences with MIT License | 6 votes |
def test_sample_buffer():
    capacity = 100
    batch = 17
    lots = 100
    with tf.Graph().as_default(), tf.Session() as sess:
        buffer = utils.SampleBuffer(capacity=capacity, schemas=dict(x=utils.Schema(tf.int32, ())))
        tf.variables_initializer(tf.global_variables() + tf.local_variables()).run()
        i_p = tf.placeholder(dtype=tf.int32, shape=())
        add = buffer.add(x=batch * i_p + tf.range(batch))
        sample = buffer.sample(lots, seed=7)['x']
        all_data_1 = buffer.data()
        all_data_2 = buffer.read(tf.range(buffer.size()))
        for i in range(20):
            add.run(feed_dict={i_p: i})
            samples = sample.eval()
            hi = batch * (i + 1)
            lo = max(0, hi - capacity)
            assert lo <= samples.min() <= lo + 3
            assert hi - 5 <= samples.max() < hi
            np.testing.assert_equal(sess.run(all_data_1), sess.run(all_data_2))
Example #11
Source File: variables.py From lambda-packs with MIT License | 6 votes |
def local_variables():
  """Returns local variables.

  Local variables - per process variables, usually not saved/restored to
  checkpoint and used for temporary or intermediate values.
  For example, they can be used as counters for metrics computation or
  number of epochs this machine has read data.
  The `tf.contrib.framework.local_variable()` function automatically adds the
  new variable to `GraphKeys.LOCAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to local variables are global variables. See
  @{tf.global_variables}

  Returns:
    A list of local `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES)
Example #12
Source File: variables.py From lambda-packs with MIT License | 6 votes |
def global_variables():
  """Returns global variables.

  Global variables are variables that are shared across machines in a
  distributed environment. The `Variable()` constructor or `get_variable()`
  automatically adds new variables to the graph collection
  `GraphKeys.GLOBAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to global variables are local variables. See
  @{tf.local_variables}

  Returns:
    A list of `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
Example #13
Source File: metric_ops_test.py From deep_image_model with Apache License 2.0 | 6 votes |
def _test_streaming_sparse_precision_at_top_k(
    self, top_k_predictions, labels, expected, class_id=None, weights=None):
  with tf.Graph().as_default() as g, self.test_session(g):
    if weights is not None:
      weights = tf.constant(weights, tf.float32)
    metric, update = metrics.streaming_sparse_precision_at_top_k(
        top_k_predictions=tf.constant(top_k_predictions, tf.int32),
        labels=labels, class_id=class_id, weights=weights)

    # Fails without initialized vars.
    self.assertRaises(tf.OpError, metric.eval)
    self.assertRaises(tf.OpError, update.eval)
    tf.initialize_variables(tf.local_variables()).run()

    # Run per-step op and assert expected values.
    if math.isnan(expected):
      self.assertTrue(math.isnan(update.eval()))
      self.assertTrue(math.isnan(metric.eval()))
    else:
      self.assertEqual(expected, update.eval())
      self.assertEqual(expected, metric.eval())
Example #14
Source File: variables.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 6 votes |
def local_variables(scope=None):
  """Returns local variables.

  Local variables - per process variables, usually not saved/restored to
  checkpoint and used for temporary or intermediate values.
  For example, they can be used as counters for metrics computation or
  number of epochs this machine has read data.
  The `tf.contrib.framework.local_variable()` function automatically adds the
  new variable to `GraphKeys.LOCAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to local variables are global variables. See
  @{tf.global_variables}

  Args:
    scope: (Optional.) A string. If supplied, the resulting list is filtered
      to include only items whose `name` attribute matches `scope` using
      `re.match`. Items without a `name` attribute are never returned if a
      scope is supplied. The choice of `re.match` means that a `scope` without
      special tokens filters by prefix.

  Returns:
    A list of local `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES, scope)
Example #15
Source File: utils.py From mbpo with MIT License | 6 votes |
def initialize_tf_variables(session, only_uninitialized=True):
    variables = tf.global_variables() + tf.local_variables()

    def is_initialized(variable):
        try:
            session.run(variable)
            return True
        except tf.errors.FailedPreconditionError:
            return False

        return False

    if only_uninitialized:
        variables = [
            variable for variable in variables
            if not is_initialized(variable)
        ]

    session.run(tf.variables_initializer(variables))
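A hypothetical call site for the helper above (the saver and checkpoint path are placeholders, and a TF 1.x session is assumed): variables restored from a checkpoint are already initialized, so the helper only initializes the remaining global and local variables.

# Hypothetical usage sketch; `saver` and `checkpoint_path` are placeholders.
sess = tf.Session()
saver.restore(sess, checkpoint_path)  # restored variables count as initialized
initialize_tf_variables(sess, only_uninitialized=True)  # initializes the rest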
Example #16
Source File: variables.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 6 votes |
def global_variables(scope=None):
  """Returns global variables.

  Global variables are variables that are shared across machines in a
  distributed environment. The `Variable()` constructor or `get_variable()`
  automatically adds new variables to the graph collection
  `GraphKeys.GLOBAL_VARIABLES`.
  This convenience function returns the contents of that collection.

  An alternative to global variables are local variables. See
  @{tf.local_variables}

  Args:
    scope: (Optional.) A string. If supplied, the resulting list is filtered
      to include only items whose `name` attribute matches `scope` using
      `re.match`. Items without a `name` attribute are never returned if a
      scope is supplied. The choice of `re.match` means that a `scope` without
      special tokens filters by prefix.

  Returns:
    A list of `Variable` objects.
  """
  return ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES, scope)
Example #17
Source File: variable_mgr.py From dlcookbook-dlbs with Apache License 2.0 | 6 votes |
def get_post_init_ops(self):
  # Copy initialized variables for variables on the parameter server
  # to the local copy of the variable.
  local_vars = tf.local_variables()
  local_var_by_name = dict(
      [(self._strip_port(v.name), v) for v in local_vars])
  post_init_ops = []
  for v in tf.global_variables():
    if v.name.startswith(variable_mgr_util.PS_SHADOW_VAR_PREFIX + '/v0/'):
      prefix = self._strip_port(
          v.name[len(variable_mgr_util.PS_SHADOW_VAR_PREFIX + '/v0'):])
      for i in range(self.benchmark_cnn.num_gpus):
        name = 'v%s%s' % (i, prefix)
        if name in local_var_by_name:
          copy_to = local_var_by_name[name]
          post_init_ops.append(copy_to.assign(v.read_value()))
  return post_init_ops
Example #18
Source File: SAGN.py From Distributed-TensorFlow-Guide with MIT License | 6 votes |
def create_global_variables(local_optimizer_vars=[]):
    """Creates global variables for local variables on the graph.
    Skips variables local variables that are created for local optimization.
    Returns dictionarys for local-to-global and global-to-local variable mappings.
    """
    local_to_global = {}
    global_to_local = {}
    with tf.device('/job:ps/task:0'):
        for v in tf.local_variables():
            if v not in local_optimizer_vars:
                v_g = tf.get_variable('g/' + v.op.name,
                                      shape=v.shape,
                                      dtype=v.dtype,
                                      trainable=True,
                                      collections=[tf.GraphKeys.GLOBAL_VARIABLES,
                                                   tf.GraphKeys.TRAINABLE_VARIABLES])
                local_to_global[v] = v_g
                global_to_local[v_g] = v
    return local_to_global, global_to_local
Example #19
Source File: variable_mgr.py From deeplearning-benchmark with Apache License 2.0 | 6 votes |
def get_post_init_ops(self):
  # Copy initialized variables for variables on the parameter server
  # to the local copy of the variable.
  local_vars = tf.local_variables()
  local_var_by_name = dict(
      [(self._strip_port(v.name), v) for v in local_vars])
  post_init_ops = []
  for v in tf.global_variables():
    if v.name.startswith(PS_SHADOW_VAR_PREFIX + '/v0/'):
      prefix = self._strip_port(
          v.name[len(PS_SHADOW_VAR_PREFIX + '/v0'):])
      for i in range(self.benchmark_cnn.num_gpus):
        name = 'v%s%s' % (i, prefix)
        if name in local_var_by_name:
          copy_to = local_var_by_name[name]
          post_init_ops.append(copy_to.assign(v.read_value()))
  return post_init_ops
Example #20
Source File: variable_mgr.py From deeplearning-benchmark with Apache License 2.0 | 6 votes |
def savable_variables(self):
  """Returns a list/dict of savable variables to pass to tf.train.Saver."""
  params = {}
  for v in tf.global_variables():
    assert (v.name.startswith(PS_SHADOW_VAR_PREFIX + '/v0/') or
            v.name == 'global_step:0')
    # We store variables in the checkpoint with the shadow variable prefix
    # removed so we can evaluate checkpoints in non-distributed replicated
    # mode. The checkpoints can also be loaded for training in
    # distributed_replicated mode.
    name = self._strip_port(self._remove_shadow_var_prefix_if_present(v.name))
    params[name] = v
  for v in tf.local_variables():
    # Non-trainable variables, such as batch norm moving averages, do not have
    # corresponding global shadow variables, so we add them here. Trainable
    # local variables have corresponding global shadow variables, which were
    # added in the global variable loop above.
    if v.name.startswith('v0/') and v not in tf.trainable_variables():
      params[self._strip_port(v.name)] = v
  return params
Example #21
Source File: DOWNPOUR.py From Distributed-TensorFlow-Guide with MIT License | 6 votes |
def create_global_variables(local_optimizer_vars=[]):
    """Creates global variables for local variables on the graph.
    Skips variables local variables that are created for local optimization.
    Returns dictionarys for local-to-global and global-to-local variable mappings.
    """
    local_to_global = {}
    global_to_local = {}
    with tf.device('/job:ps/task:0'):
        for v in tf.local_variables():
            if v not in local_optimizer_vars:
                v_g = tf.get_variable('g/' + v.op.name,
                                      shape=v.shape,
                                      dtype=v.dtype,
                                      trainable=True,
                                      collections=[tf.GraphKeys.GLOBAL_VARIABLES,
                                                   tf.GraphKeys.TRAINABLE_VARIABLES])
                local_to_global[v] = v_g
                global_to_local[v_g] = v
    return local_to_global, global_to_local
Example #22
Source File: variable_mgr.py From tf-imagenet with Apache License 2.0 | 6 votes |
def get_post_init_ops(self):
  # Copy initialized variables for variables on the parameter server
  # to the local copy of the variable.
  local_vars = tf.local_variables()
  local_var_by_name = dict(
      [(self._strip_port(v.name), v) for v in local_vars])
  post_init_ops = []
  for v in tf.global_variables():
    if v.name.startswith(PS_SHADOW_VAR_PREFIX + '/v0/'):
      prefix = self._strip_port(
          v.name[len(PS_SHADOW_VAR_PREFIX + '/v0'):])
      for i in range(self.benchmark_cnn.num_gpus):
        name = 'v%s%s' % (i, prefix)
        if name in local_var_by_name:
          copy_to = local_var_by_name[name]
          post_init_ops.append(copy_to.assign(v.read_value()))
  return post_init_ops
Example #23
Source File: variable_mgr.py From tf-imagenet with Apache License 2.0 | 6 votes |
def savable_variables(self):
  """Returns a list/dict of savable variables to pass to tf.train.Saver."""
  params = {}
  for v in tf.global_variables():
    assert (v.name.startswith(PS_SHADOW_VAR_PREFIX + '/v0/') or
            v.name == 'global_step:0')
    # We store variables in the checkpoint with the shadow variable prefix
    # removed so we can evaluate checkpoints in non-distributed replicated
    # mode. The checkpoints can also be loaded for training in
    # distributed_replicated mode.
    name = self._strip_port(self._remove_shadow_var_prefix_if_present(v.name))
    params[name] = v
  for v in tf.local_variables():
    # Non-trainable variables, such as batch norm moving averages, do not have
    # corresponding global shadow variables, so we add them here. Trainable
    # local variables have corresponding global shadow variables, which were
    # added in the global variable loop above.
    if v.name.startswith('v0/') and v not in tf.trainable_variables():
      params[self._strip_port(v.name)] = v
  return params
Example #24
Source File: DOWNPOUR.py From Distributed-TensorFlow-Guide with MIT License | 6 votes |
def create_global_variables():
    """Creates global variables for local variables on the graph.
    Returns dictionarys for local-to-global and global-to-local variable mappings.
    """
    local_to_global = {}
    global_to_local = {}
    with tf.device('/job:ps/task:0'):
        for v in tf.local_variables():
            v_g = tf.get_variable('g/' + v.op.name,
                                  shape=v.shape,
                                  dtype=v.dtype,
                                  trainable=True,
                                  collections=[tf.GraphKeys.GLOBAL_VARIABLES,
                                               tf.GraphKeys.TRAINABLE_VARIABLES])
            local_to_global[v] = v_g
            global_to_local[v_g] = v
    return local_to_global, global_to_local
Example #25
Source File: distributed.py From petridishnn with MIT License | 6 votes |
def _get_initial_sync_op(self):
    """
    Get the op to copy-initialized all local variables from PS.
    """
    def strip_port(s):
        if s.endswith(':0'):
            return s[:-2]
        return s

    local_vars = tf.local_variables()
    local_var_by_name = dict([(strip_port(v.name), v) for v in local_vars])
    ops = []
    nr_shadow_vars = len(self._shadow_vars)
    for v in self._shadow_vars:
        vname = strip_port(v.name)
        for i in range(self.nr_gpu):
            name = 'tower%s/%s' % (i, vname)
            assert name in local_var_by_name, \
                "Shadow variable {} doesn't match a corresponding local variable!".format(v.name)
            copy_to = local_var_by_name[name]
            # logger.info("{} -> {}".format(v.name, copy_to.name))
            ops.append(copy_to.assign(v.read_value()))
    return tf.group(*ops, name='sync_{}_variables_from_ps'.format(nr_shadow_vars))
Example #26
Source File: AGN.py From Distributed-TensorFlow-Guide with MIT License | 6 votes |
def create_global_variables(local_optimizer_vars=[]):
    """Creates global variables for local variables on the graph.
    Skips variables local variables that are created for local optimization.
    Returns dictionarys for local-to-global and global-to-local variable mappings.
    """
    local_to_global = {}
    global_to_local = {}
    with tf.device('/job:ps/task:0'):
        for v in tf.local_variables():
            if v not in local_optimizer_vars:
                v_g = tf.get_variable('g/' + v.op.name,
                                      shape=v.shape,
                                      dtype=v.dtype,
                                      trainable=True,
                                      collections=[tf.GraphKeys.GLOBAL_VARIABLES,
                                                   tf.GraphKeys.TRAINABLE_VARIABLES])
                local_to_global[v] = v_g
                global_to_local[v_g] = v
    return local_to_global, global_to_local
Example #27
Source File: distributed.py From ADL with MIT License | 6 votes |
def _get_initial_sync_op(self):
    """
    Get the op to copy-initialized all local variables from PS.
    """
    def strip_port(s):
        if s.endswith(':0'):
            return s[:-2]
        return s

    local_vars = tf.local_variables()
    local_var_by_name = {strip_port(v.name): v for v in local_vars}
    ops = []
    nr_shadow_vars = len(self._shadow_vars)
    for v in self._shadow_vars:
        vname = strip_port(v.name)
        for i in range(self.nr_gpu):
            name = 'tower%s/%s' % (i, vname)
            assert name in local_var_by_name, \
                "Shadow variable {} doesn't match a corresponding local variable!".format(v.name)
            copy_to = local_var_by_name[name]
            # logger.info("{} -> {}".format(v.name, copy_to.name))
            ops.append(copy_to.assign(v.read_value()))
    return tf.group(*ops, name='sync_{}_variables_from_ps'.format(nr_shadow_vars))
Example #28
Source File: rnn.py From deepbgc with MIT License | 6 votes |
def auc_roc(y_true, y_pred):
    """
    Defines AUC ROC metric callback, inspired by
    https://github.com/keras-team/keras/issues/6050#issuecomment-329996505
    """
    import tensorflow as tf
    # any tensorflow metric
    value, update_op = tf.metrics.auc(y_true, y_pred)

    # find all variables created for this metric
    metric_vars = [i for i in tf.local_variables() if 'auc_roc' in i.name.split('/')[1]]

    # Add metric variables to GLOBAL_VARIABLES collection.
    # They will be initialized for new session.
    for v in metric_vars:
        tf.add_to_collection(tf.GraphKeys.GLOBAL_VARIABLES, v)

    # force to update metric values
    with tf.control_dependencies([update_op]):
        value = tf.identity(value)
        return value
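The variables created by tf.metrics.auc live in LOCAL_VARIABLES, which is why the function above filters tf.local_variables() by name and copies the matches into GLOBAL_VARIABLES so Keras's session setup initializes them. A hypothetical way to wire such a metric into a Keras model (the model architecture and optimizer below are placeholders for illustration, not part of the project):

from keras.models import Sequential
from keras.layers import Dense

# Toy binary classifier; the custom metric function is passed by reference.
model = Sequential([Dense(1, activation='sigmoid', input_shape=(16,))])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=[auc_roc])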
Example #29
Source File: utils.py From lang2program with Apache License 2.0 | 6 votes |
def guarantee_initialized_variables(session, variables=None):
    """Guarantee that all the specified variables are initialized.

    If a variable is already initialized, leave it alone. Otherwise, initialize it.

    If no variables are specified, checks all variables in the default graph.

    Args:
        variables (list[tf.Variable])
    """
    name_to_var = {v.op.name: v for v in tf.global_variables() + tf.local_variables()}
    uninitialized_variables = list(name_to_var[name] for name in
                                   session.run(tf.report_uninitialized_variables(variables)))
    init_op = tf.variables_initializer(uninitialized_variables)
    session.run(init_op)
    return uninitialized_variables
Example #30
Source File: utils.py From lang2program with Apache License 2.0 | 6 votes |
def guarantee_initialized_variables(session, variables=None):
    """Guarantee that all the specified variables are initialized.

    If a variable is already initialized, leave it alone. Otherwise, initialize it.

    If no variables are specified, checks all variables in the default graph.

    Args:
        variables (list[tf.Variable])
    """
    name_to_var = {v.op.name: v for v in tf.global_variables() + tf.local_variables()}
    uninitialized_variables = list(name_to_var[name] for name in
                                   session.run(tf.report_uninitialized_variables(variables)))
    init_op = tf.variables_initializer(uninitialized_variables)
    session.run(init_op)
    return uninitialized_variables