Python tensorflow.get_local_variable() Examples
The following are 10 code examples of tensorflow.get_local_variable(), collected from open-source projects. The source file, project, and license are noted above each example.
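Before the examples, a minimal self-contained sketch of the function itself may help (this assumes the TensorFlow 1.x API; under TensorFlow 2.x the same function lives at tf.compat.v1.get_local_variable). tf.get_local_variable behaves like tf.get_variable, but places the variable in the LOCAL_VARIABLES collection and defaults to trainable=False, so it is excluded from checkpoints by default and is initialized by tf.local_variables_initializer():

import tensorflow as tf  # TF 1.x API; use tensorflow.compat.v1 under TF 2.x.

# A scalar counter that lives only in this process: non-trainable, placed in
# the LOCAL_VARIABLES collection, and not saved to checkpoints by default.
counter = tf.get_local_variable(
    'example_counter', shape=(), dtype=tf.int32,
    initializer=tf.zeros_initializer())

with tf.Session() as sess:
  sess.run(tf.local_variables_initializer())  # Initializes local variables.
  print(sess.run(counter))  # -> 0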
Example #1
Source File: mpc_agent.py from planet (Apache License 2.0) | 6 votes
def __init__(self, batch_env, step, is_training, should_log, config):
  self._batch_env = batch_env
  self._step = step  # Trainer step, not environment step.
  self._is_training = is_training
  self._should_log = should_log
  self._config = config
  self._cell = config.cell
  state = self._cell.zero_state(len(batch_env), tf.float32)
  # Mint a resource-backed local variable with the same shape as a template
  # tensor, named after the tensor (scope slashes replaced with underscores).
  var_like = lambda x: tf.get_local_variable(
      x.name.split(':')[0].replace('/', '_') + '_var',
      shape=x.shape,
      initializer=lambda *_, **__: tf.zeros_like(x),
      use_resource=True)
  self._state = nested.map(var_like, state)
  self._prev_action = tf.get_local_variable(
      'prev_action_var',
      shape=self._batch_env.action.shape,
      initializer=lambda *_, **__: tf.zeros_like(self._batch_env.action),
      use_resource=True)
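The var_like helper above is the reusable core of this example: it creates a local variable shaped like a template tensor and named after it. A standalone sketch of the same pattern (the template tensor here is hypothetical; assumes TF 1.x):

# Stand-in for an RNN state tensor produced elsewhere in the graph.
template = tf.zeros((4, 8), name='hidden_state')
# Name the variable after the tensor, replacing scope slashes so the result
# is a legal variable name, e.g. 'hidden_state_var'. The lambda initializer
# ignores the (shape, dtype, ...) arguments TF passes and zeros-like the
# template instead, which avoids depending on the template's live value.
state_var = tf.get_local_variable(
    template.name.split(':')[0].replace('/', '_') + '_var',
    shape=template.shape,
    initializer=lambda *_, **__: tf.zeros_like(template),
    use_resource=True)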
Example #2
Source File: util.py from deep-molecular-massspec (Apache License 2.0) | 6 votes
def value_op_with_initializer(value_op_fn, init_op_fn):
  """Make value_op that gets set by idempotent init_op on first invocation."""
  init_has_been_run = tf.get_local_variable(
      'has_been_run',
      initializer=np.zeros(shape=(), dtype=np.bool),
      dtype=tf.bool)
  value_op = value_op_fn()

  def run_init_and_toggle():
    init_op = init_op_fn(value_op)
    with tf.control_dependencies([init_op]):
      assign_op = init_has_been_run.assign(True)
    with tf.control_dependencies([assign_op]):
      return tf.identity(value_op)

  return tf.cond(init_has_been_run, lambda: value_op, run_init_and_toggle)
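This is a run-once initialization pattern: the local boolean has_been_run guards init_op_fn so it executes only on the first evaluation, after which value_op is returned directly. A hypothetical usage sketch (the name 'cached' and the values are illustrative, not from the source; assumes TF 1.x):

# Hypothetical usage of value_op_with_initializer.
cached = tf.get_local_variable(
    'cached', shape=(3,), initializer=tf.zeros_initializer())
value = value_op_with_initializer(
    value_op_fn=lambda: cached.read_value(),
    init_op_fn=lambda value_op: cached.assign([1., 2., 3.]))
# The first sess.run(value) runs the assign; later runs just read 'cached'.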
Example #3
Source File: mpc_agent.py from dreamer (Apache License 2.0) | 6 votes
def __init__(self, batch_env, step, is_training, should_log, config):
  self._step = step  # Trainer step, not environment step.
  self._is_training = is_training
  self._should_log = should_log
  self._config = config
  self._cell = config.cell
  self._num_envs = len(batch_env)
  state = self._cell.zero_state(self._num_envs, tf.float32)
  # Same var_like pattern as Example #1: one local variable per state tensor.
  var_like = lambda x: tf.get_local_variable(
      x.name.split(':')[0].replace('/', '_') + '_var',
      shape=x.shape,
      initializer=lambda *_, **__: tf.zeros_like(x),
      use_resource=True)
  self._state = nested.map(var_like, state)
  batch_action_shape = (self._num_envs,) + batch_env.action_space.shape
  self._prev_action = tf.get_local_variable(
      'prev_action_var',
      shape=batch_action_shape,
      initializer=lambda *_, **__: tf.zeros(batch_action_shape),
      use_resource=True)
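The substantive difference from Example #1 is the previous-action variable: here the shape comes from a plain Python tuple (batch_env.action_space.shape), so the initializer uses tf.zeros(batch_action_shape) rather than tf.zeros_like on a tensor. A minimal sketch of that difference (sizes are hypothetical; assumes TF 1.x):

num_envs, action_dim = 8, 4  # Hypothetical batch and action sizes.
batch_action_shape = (num_envs, action_dim)
prev_action = tf.get_local_variable(
    'prev_action_var',
    shape=batch_action_shape,
    initializer=lambda *_, **__: tf.zeros(batch_action_shape),
    use_resource=True)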
Example #4
Source File: trainable_optimizer.py from yolo_v2 (Apache License 2.0) | 4 votes
def local_state_variables(init_values, return_init_values):
  """Create local variables initialized from init_values.

  This will create local variables from a list of init_values. Each variable
  will be named based on the value's shape and dtype.

  As a convenience, a boolean tensor allows you to return value from the
  created local variable or from the original init value.

  Args:
    init_values: iterable of tensors
    return_init_values: boolean tensor

  Returns:
    local_vars: list of the created local variables.
    vals: if return_init_values is true, then this returns the values of
      init_values. Otherwise it returns the values of the local_vars.
  """
  if not init_values:
    return [], []

  # This generates a harmless warning when saving the metagraph.
  variable_use_count = tf.get_collection_ref(_LOCAL_STATE_VARIABLE_COLLECTION)
  if not variable_use_count:
    variable_use_count.append(collections.defaultdict(int))
  variable_use_count = variable_use_count[0]

  local_vars = []
  with tf.variable_scope(OPTIMIZER_SCOPE):
    # We can't use the init_value as an initializer as init_value may
    # itself depend on some problem variables. This would produce
    # inter-variable initialization order dependence which TensorFlow
    # sucks at making easy.
    for init_value in init_values:
      name = create_local_state_variable_name(init_value)
      unique_name = name + "_" + str(variable_use_count[name])
      variable_use_count[name] += 1
      # The overarching idea here is to be able to reuse variables between
      # different sessions on the same TensorFlow master without errors. By
      # uniquifying based on the type and name we mirror the checks made
      # inside TensorFlow, while still allowing some memory reuse. Ultimately
      # this is a hack due to the broken Session.reset().
      local_vars.append(
          tf.get_local_variable(
              unique_name,
              initializer=tf.zeros(
                  init_value.get_shape(), dtype=init_value.dtype)))

  # It makes things a lot simpler if we use the init_value the first
  # iteration, instead of the variable itself. It allows us to propagate
  # gradients through it as well as simplifying initialization. The variable
  # ends up assigned to after the first iteration.
  vals = tf.cond(return_init_values, lambda: init_values,
                 lambda: local_vars)
  if len(init_values) == 1:
    # tf.cond extracts elements from singleton lists.
    vals = [vals]
  return local_vars, vals
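A hypothetical call sketch for local_state_variables (the values are illustrative; it assumes the module's _LOCAL_STATE_VARIABLE_COLLECTION, OPTIMIZER_SCOPE, and create_local_state_variable_name are in scope, as in the source file):

# Hypothetical usage of local_state_variables.
init_values = [tf.ones((2, 2)), tf.zeros((5,))]
# Feed True on the first training iteration, False afterwards.
return_init = tf.placeholder_with_default(True, shape=())
state_vars, vals = local_state_variables(init_values, return_init)
# On the first iteration `vals` are the init_values themselves, so gradients
# can flow through them; afterwards the local variables carry the state.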
Examples #5 through #10 contain the same local_state_variables function as Example #4, copied verbatim into other forks of the TensorFlow models repository. Only their sources differ:

Example #5: trainable_optimizer.py from Gun-Detector (Apache License 2.0) | 4 votes
Example #6: trainable_optimizer.py from object_detection_kitti (Apache License 2.0) | 4 votes
Example #7: trainable_optimizer.py from object_detection_with_tensorflow (MIT License) | 4 votes
Example #8: trainable_optimizer.py from g-tensorflow-models (Apache License 2.0) | 4 votes
Example #9: trainable_optimizer.py from models (Apache License 2.0) | 4 votes
Example #10: trainable_optimizer.py from multilabel-image-classification-tensorflow (MIT License) | 4 votes