Python tensorflow.python.ops.variable_scope._get_default_variable_store() Examples
The following are 10 code examples of tensorflow.python.ops.variable_scope._get_default_variable_store(). Each example links to the original project and source file it was taken from. You may also want to check out all the available functions and classes of the module tensorflow.python.ops.variable_scope.
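As a quick orientation before the examples, the sketch below shows what this private helper returns: the graph-level _VariableStore that tf.get_variable writes into, whose _vars dictionary is keyed by full variable names. This is only a minimal sketch assuming TensorFlow 1.x-style graph mode (tf.compat.v1 with eager execution disabled); the scope and variable names are illustrative, and _vars is a private attribute that can change between releases.

import tensorflow as tf
from tensorflow.python.ops import variable_scope as vs

tf.compat.v1.disable_eager_execution()  # the default variable store is a graph-mode concept

# Create a variable through the public API ...
with tf.compat.v1.variable_scope("dense"):
  kernel = tf.compat.v1.get_variable("kernel", shape=[3, 4])

# ... then look the same variable up through the private store backing get_variable.
store = vs._get_default_variable_store()  # pylint: disable=protected-access
assert store._vars["dense/kernel"] is kernel

The examples below use exactly this hook, either to reach into the store's _vars map directly or to pass the store to VariableScope.get_variable.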
Example #1
Source File: pytorch_to_tf.py From coref with Apache License 2.0
def load_from_pytorch_checkpoint(checkpoint, assignment_map):
  pytorch_model = torch.load(checkpoint, map_location='cpu')
  pt_model_with_tf_keys = my_convert_keys(pytorch_model)
  for _, name in assignment_map.items():
    store_vars = vs._get_default_variable_store()._vars
    var = store_vars.get(name, None)
    assert var is not None
    if name not in pt_model_with_tf_keys:
      print('WARNING:', name, 'not found in original model.')
      continue
    array = pt_model_with_tf_keys[name].cpu().numpy()
    if any([x in name for x in tensors_to_transpose]):
      array = array.transpose()
    assert tuple(var.get_shape().as_list()) == tuple(array.shape)
    init_value = ops.convert_to_tensor(array, dtype=np.float32)
    var._initial_value = init_value
    var._initializer_op = var.assign(init_value)
Example #2
Source File: function.py From lambda-packs with MIT License
def getvar(
    self,
    getter,
    name,
    shape=None,
    dtype=None,
    initializer=None,
    reuse=None,
    trainable=True,
    collections=None,  # pylint: disable=redefined-outer-name
    use_resource=None,
    **kwargs):
  """A custom variable getter."""
  # Here, we switch the default graph to the outer graph and ask the
  # variable scope in which the function is defined to give us the
  # variable. The variable is stashed in extra_vars and returned to
  # the caller.
  #
  # We capture these variables so that the variable definition is
  # hoisted upward to the outer most graph.
  with self._outer_graph.as_default():
    # pylint: disable=protected-access
    var = self._vscope.get_variable(
        vs._get_default_variable_store(),
        name,
        shape=shape,
        dtype=dtype,
        initializer=initializer,
        reuse=reuse,
        trainable=trainable,
        collections=collections,
        use_resource=use_resource)
    self.extra_vars.append(var)
    if isinstance(var, resource_variable_ops.ResourceVariable):
      # For resource-based variables read the variable outside the function
      # and pass in the value. This ensures that the function is pure and
      # differentiable. TODO(apassos) this may have performance problems if
      # the function will only do embedding lookups on the variable.
      return var.value()
    return var
Example #3
Source File: function.py From auto-alt-text-lambda-api with MIT License
def getvar(self,
           getter,
           name,
           shape=None,
           dtype=None,
           initializer=None,
           trainable=True,
           collections=None,
           **kwargs):
  """A custom variable getter."""
  # Here, we switch the default graph to the outer graph and ask the
  # variable scope in which the function is defined to give us the
  # variable. The variable is stashed in extra_vars and returned to
  # the caller.
  #
  # We capture these variables so that the variable definition is
  # hoisted upward to the outer most graph.
  with self._outer_graph.as_default():
    # pylint: disable=protected-access
    var = self._vscope.get_variable(
        vs._get_default_variable_store(),
        name,
        shape=shape,
        dtype=dtype,
        initializer=initializer,
        trainable=trainable,
        collections=collections)
    self.extra_vars.append(var)
    return var
Example #4
Source File: variable_scope_test.py From deep_image_model with Apache License 2.0
def testGetVar(self):
  vs = variable_scope._get_default_variable_store()
  v = vs.get_variable("v", [1])
  v1 = vs.get_variable("v", [1])
  assert v == v1
Example #5
Source File: variable_scope_test.py From deep_image_model with Apache License 2.0
def testNameExists(self):
  vs = variable_scope._get_default_variable_store()
  # No check by default, so we can both create and get existing names.
  v = vs.get_variable("v", [1])
  v1 = vs.get_variable("v", [1])
  assert v == v1
  # When reuse is False, we fail when variables are already there.
  vs.get_variable("w", [1], reuse=False)  # That's ok.
  with self.assertRaises(ValueError):
    vs.get_variable("v", [1], reuse=False)  # That fails.
  # When reuse is True, we fail when variables are new.
  vs.get_variable("v", [1], reuse=True)  # That's ok.
  with self.assertRaises(ValueError):
    vs.get_variable("u", [1], reuse=True)  # That fails.
Example #6
Source File: variable_scope_test.py From deep_image_model with Apache License 2.0
def testNamelessStore(self):
  vs = variable_scope._get_default_variable_store()
  vs.get_variable("v1", [2])
  vs.get_variable("v2", [2])
  expected_names = ["%s:0" % name for name in ["v1", "v2"]]
  self.assertEqual(set(expected_names),
                   set([v.name for v in vs._vars.values()]))
Example #7
Source File: function.py From deep_image_model with Apache License 2.0
def getvar(self,
           name,
           shape=None,
           dtype=None,
           initializer=None,
           trainable=True,
           collections=None,
           **kwargs):
  """A custom variable getter."""
  # Here, we switch the default graph to the outer graph and ask the
  # variable scope in which the function is defined to give us the
  # variable. The variable is stashed in extra_vars and returned to
  # the caller.
  #
  # We capture these variables so that the variable definition is
  # hoisted upward to the outer most graph.
  with self._outer_graph.as_default():
    # pylint: disable=protected-access
    var = self._vscope.get_variable(
        vs._get_default_variable_store(),
        name,
        shape=shape,
        dtype=dtype,
        initializer=initializer,
        trainable=trainable,
        collections=collections)
    self.extra_vars.append(var)
    return var
Example #8
Source File: graph_callable.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _default_initializer(name, shape, dtype):
  """The default initializer for variables."""
  # pylint: disable=protected-access
  store = variable_scope._get_default_variable_store()
  initializer = store._get_default_initializer(name, shape=shape, dtype=dtype)
  # pylint: enable=protected-access
  return initializer[0]
Example #9
Source File: function.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def getvar(
    self,
    getter,
    name,
    shape=None,
    dtype=None,
    initializer=None,
    reuse=None,
    trainable=True,
    collections=None,  # pylint: disable=redefined-outer-name
    use_resource=None,
    **kwargs):
  """A custom variable getter."""
  # Here, we switch the default graph to the outer graph and ask the
  # variable scope in which the function is defined to give us the
  # variable. The variable is stashed in extra_vars and returned to
  # the caller.
  #
  # We capture these variables so that the variable definition is
  # hoisted upward to the outer most graph.
  with self._outer_graph.as_default():
    # pylint: disable=protected-access
    var = self._vscope.get_variable(
        vs._get_default_variable_store(),
        name,
        shape=shape,
        dtype=dtype,
        initializer=initializer,
        reuse=reuse,
        trainable=trainable,
        collections=collections,
        use_resource=use_resource)
    self.extra_vars.append(var)
    if isinstance(var, resource_variable_ops.ResourceVariable):
      # For resource-based variables read the variable outside the function
      # and pass in the value. This ensures that the function is pure and
      # differentiable. TODO(apassos) this may have performance problems if
      # the function will only do embedding lookups on the variable.
      return var.value()
    return var
Example #10
Source File: function.py From keras-lambda with MIT License
def getvar(self,
           getter,
           name,
           shape=None,
           dtype=None,
           initializer=None,
           trainable=True,
           collections=None,
           **kwargs):
  """A custom variable getter."""
  # Here, we switch the default graph to the outer graph and ask the
  # variable scope in which the function is defined to give us the
  # variable. The variable is stashed in extra_vars and returned to
  # the caller.
  #
  # We capture these variables so that the variable definition is
  # hoisted upward to the outer most graph.
  with self._outer_graph.as_default():
    # pylint: disable=protected-access
    var = self._vscope.get_variable(
        vs._get_default_variable_store(),
        name,
        shape=shape,
        dtype=dtype,
        initializer=initializer,
        trainable=trainable,
        collections=collections)
    self.extra_vars.append(var)
    return var