Python tensorflow.is_variable_initialized() Examples
The following are 30 code examples of tensorflow.is_variable_initialized(). You can go to the original project or source file by following the links above each example, or check out all other available functions and classes of the tensorflow module.
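Before the project-specific examples, here is a minimal sketch of the pattern most of them share: tf.is_variable_initialized(var) builds a boolean tensor that, when evaluated in a session, reports whether var has already been initialized, which lets you initialize only the variables that still need it. The sketch assumes the TensorFlow 1.x graph-mode API; the function name and variable names below are illustrative, not taken from any of the listed projects.

import tensorflow as tf  # TensorFlow 1.x graph-mode API assumed

def init_only_uninitialized(sess):
    # Minimal sketch: one is_variable_initialized op per global variable,
    # evaluated in a single sess.run call.
    global_vars = tf.global_variables()
    init_flags = sess.run([tf.is_variable_initialized(v) for v in global_vars])
    # Keep the variables whose flag came back False and initialize only those.
    uninitialized = [v for v, flag in zip(global_vars, init_flags) if not flag]
    if uninitialized:
        sess.run(tf.variables_initializer(uninitialized))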
Example #1
Source File: tfutil.py From transparent_latent_gan with MIT License | 6 votes |
def _create_autosummary_var(name, value_expr):
    assert not _autosummary_finalized
    v = tf.cast(value_expr, tf.float32)
    if v.shape.ndims is 0:
        v = [v, np.float32(1.0)]
    elif v.shape.ndims is 1:
        v = [tf.reduce_sum(v), tf.cast(tf.shape(v)[0], tf.float32)]
    else:
        v = [tf.reduce_sum(v), tf.reduce_prod(tf.cast(tf.shape(v), tf.float32))]
    v = tf.cond(tf.is_finite(v[0]), lambda: tf.stack(v), lambda: tf.zeros(2))
    with tf.control_dependencies(None):
        var = tf.Variable(tf.zeros(2)) # [numerator, denominator]
        update_op = tf.cond(tf.is_variable_initialized(var), lambda: tf.assign_add(var, v), lambda: tf.assign(var, v))
    if name in _autosummary_vars:
        _autosummary_vars[name].append(var)
    else:
        _autosummary_vars[name] = [var]
    return update_op

#----------------------------------------------------------------------------
# Call filewriter.add_summary() with all summaries in the default graph,
# automatically finalizing and merging them on the first call.
Example #2
Source File: session_manager_test.py From deep_image_model with Apache License 2.0 | 6 votes |
def testPrepareSessionDidNotInitLocalVariable(self):
    with tf.Graph().as_default():
        v = tf.Variable(1, name="v")
        w = tf.Variable(
            v,
            trainable=False,
            collections=[tf.GraphKeys.LOCAL_VARIABLES],
            name="w")
        with self.test_session():
            self.assertEqual(False, tf.is_variable_initialized(v).eval())
            self.assertEqual(False, tf.is_variable_initialized(w).eval())
            sm2 = tf.train.SessionManager(
                ready_op=tf.report_uninitialized_variables())
            with self.assertRaisesRegexp(RuntimeError,
                                         "Init operations did not make model ready"):
                sm2.prepare_session("", init_op=v.initializer)
Example #3
Source File: session_manager_test.py From deep_image_model with Apache License 2.0 | 6 votes |
def testPrepareSessionWithReadyNotReadyForLocal(self):
    with tf.Graph().as_default():
        v = tf.Variable(1, name="v")
        w = tf.Variable(
            v,
            trainable=False,
            collections=[tf.GraphKeys.LOCAL_VARIABLES],
            name="w")
        with self.test_session():
            self.assertEqual(False, tf.is_variable_initialized(v).eval())
            self.assertEqual(False, tf.is_variable_initialized(w).eval())
            sm2 = tf.train.SessionManager(
                ready_op=tf.report_uninitialized_variables(),
                ready_for_local_init_op=tf.report_uninitialized_variables(
                    tf.all_variables()),
                local_init_op=w.initializer)
            with self.assertRaisesRegexp(
                    RuntimeError,
                    "Init operations did not make model ready for local_init"):
                sm2.prepare_session("", init_op=None)
Example #4
Source File: tfutil.py From tileGAN with GNU General Public License v3.0 | 6 votes |
def init_uninited_vars(vars=None):
    if vars is None: vars = tf.global_variables()
    test_vars = []; test_ops = []
    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
# tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()]
Example #5
Source File: tfutil.py From tileGAN with GNU General Public License v3.0 | 6 votes |
def _create_autosummary_var(name, value_expr):
    assert not _autosummary_finalized
    v = tf.cast(value_expr, tf.float32)
    if v.shape.ndims is 0:
        v = [v, np.float32(1.0)]
    elif v.shape.ndims is 1:
        v = [tf.reduce_sum(v), tf.cast(tf.shape(v)[0], tf.float32)]
    else:
        v = [tf.reduce_sum(v), tf.reduce_prod(tf.cast(tf.shape(v), tf.float32))]
    v = tf.cond(tf.is_finite(v[0]), lambda: tf.stack(v), lambda: tf.zeros(2))
    with tf.control_dependencies(None):
        var = tf.Variable(tf.zeros(2)) # [numerator, denominator]
        update_op = tf.cond(tf.is_variable_initialized(var), lambda: tf.assign_add(var, v), lambda: tf.assign(var, v))
    if name in _autosummary_vars:
        _autosummary_vars[name].append(var)
    else:
        _autosummary_vars[name] = [var]
    return update_op

#----------------------------------------------------------------------------
# Call filewriter.add_summary() with all summaries in the default graph,
# automatically finalizing and merging them on the first call.
Example #6
Source File: nmt_generator.py From NMT_GAN with Apache License 2.0 | 6 votes |
def init_and_reload(self):
    ##########
    # this function is only used for the gan training with reload
    ##########
    params = [param for param in tf.trainable_variables() if 'generate' in param.name]
    #params = [param for param in tf.all_variables()]
    if not self.sess.run(tf.is_variable_initialized(params[0])):
        #init_op = tf.initialize_variables(params)
        init_op = tf.global_variables_initializer()  ## this is important here to initialize_all_variables()
        self.sess.run(init_op)
    saver = tf.train.Saver(params)
    self.saver = saver
    if self.gen_reload:  ##here must be true
        print('reloading params from %s ' % self.saveto)
        self.saver.restore(self.sess, './' + self.saveto)
        print('reloading params done')
    else:
        print('error, reload must be true!!')
Example #7
Source File: utils.py From delira with GNU Affero General Public License v3.0 | 6 votes |
def initialize_uninitialized(sess):
    """
    Function to initialize only uninitialized variables in a session graph

    Parameters
    ----------
    sess : tf.Session()
    """
    global_vars = tf.global_variables()
    is_not_initialized = sess.run(
        [tf.is_variable_initialized(var) for var in global_vars])
    not_initialized_vars = [v for (v, f) in zip(
        global_vars, is_not_initialized) if not f]

    if not_initialized_vars:
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #8
Source File: utils.py From ssd-tensorflow with GNU General Public License v3.0 | 6 votes |
def initialize_uninitialized_variables(sess):
    """
    Only initialize the weights that have not yet been initialized by other
    means, such as importing a metagraph and a checkpoint. It's useful when
    extending an existing model.
    """
    uninit_vars = []
    uninit_tensors = []
    for var in tf.global_variables():
        uninit_vars.append(var)
        uninit_tensors.append(tf.is_variable_initialized(var))
    uninit_bools = sess.run(uninit_tensors)
    uninit = zip(uninit_bools, uninit_vars)
    uninit = [var for init, var in uninit if not init]
    sess.run(tf.variables_initializer(uninit))

#-------------------------------------------------------------------------------
Example #9
Source File: tfutil.py From transparent_latent_gan with MIT License | 6 votes |
def init_uninited_vars(vars=None):
    if vars is None: vars = tf.global_variables()
    test_vars = []; test_ops = []
    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
# tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()]
Example #10
Source File: tfutil.py From disentangling_conditional_gans with MIT License | 6 votes |
def _create_autosummary_var(name, value_expr):
    assert not _autosummary_finalized
    v = tf.cast(value_expr, tf.float32)
    if v.shape.ndims is 0:
        v = [v, np.float32(1.0)]
    elif v.shape.ndims is 1:
        v = [tf.reduce_sum(v), tf.cast(tf.shape(v)[0], tf.float32)]
    else:
        v = [tf.reduce_sum(v), tf.reduce_prod(tf.cast(tf.shape(v), tf.float32))]
    v = tf.cond(tf.is_finite(v[0]), lambda: tf.stack(v), lambda: tf.zeros(2))
    with tf.control_dependencies(None):
        var = tf.Variable(tf.zeros(2)) # [numerator, denominator]
        update_op = tf.cond(tf.is_variable_initialized(var), lambda: tf.assign_add(var, v), lambda: tf.assign(var, v))
    if name in _autosummary_vars:
        _autosummary_vars[name].append(var)
    else:
        _autosummary_vars[name] = [var]
    return update_op

#----------------------------------------------------------------------------
# Call filewriter.add_summary() with all summaries in the default graph,
# automatically finalizing and merging them on the first call.
Example #11
Source File: tfutil.py From higan with MIT License | 6 votes |
def init_uninited_vars(vars=None):
    if vars is None: vars = tf.global_variables()
    test_vars = []; test_ops = []
    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
# tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()]
Example #12
Source File: tfutil.py From higan with MIT License | 6 votes |
def _create_autosummary_var(name, value_expr):
    assert not _autosummary_finalized
    v = tf.cast(value_expr, tf.float32)
    if v.shape.ndims is 0:
        v = [v, np.float32(1.0)]
    elif v.shape.ndims is 1:
        v = [tf.reduce_sum(v), tf.cast(tf.shape(v)[0], tf.float32)]
    else:
        v = [tf.reduce_sum(v), tf.reduce_prod(tf.cast(tf.shape(v), tf.float32))]
    v = tf.cond(tf.is_finite(v[0]), lambda: tf.stack(v), lambda: tf.zeros(2))
    with tf.control_dependencies(None):
        var = tf.Variable(tf.zeros(2)) # [numerator, denominator]
        update_op = tf.cond(tf.is_variable_initialized(var), lambda: tf.assign_add(var, v), lambda: tf.assign(var, v))
    if name in _autosummary_vars:
        _autosummary_vars[name].append(var)
    else:
        _autosummary_vars[name] = [var]
    return update_op

#----------------------------------------------------------------------------
# Call filewriter.add_summary() with all summaries in the default graph,
# automatically finalizing and merging them on the first call.
Example #13
Source File: base_model.py From defensegan with Apache License 2.0 | 6 votes |
def initialize_uninitialized(self, ):
    """Only initializes the variables of a TensorFlow session that were not
    already initialized.
    """
    # List all global variables.
    sess = self.sess
    global_vars = tf.global_variables()

    # Find initialized status for all variables.
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not previously initialized.
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]
    for v in not_initialized_vars:
        print('[!] not init: {}'.format(v.name))

    # Initialize all uninitialized variables found, if any.
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #14
Source File: tf_utils.py From neupy with MIT License | 6 votes |
def initialize_uninitialized_variables(variables=None):
    if variables is None:
        variables = tf.global_variables()

    if not variables:
        return

    session = tensorflow_session()
    is_not_initialized = session.run([
        tf.is_variable_initialized(var) for var in variables])

    not_initialized_vars = [
        v for (v, f) in zip(variables, is_not_initialized) if not f]

    if len(not_initialized_vars):
        session.run(tf.variables_initializer(not_initialized_vars))
Example #15
Source File: tfutil.py From interfacegan with MIT License | 6 votes |
def init_uninited_vars(vars=None):
    if vars is None: vars = tf.global_variables()
    test_vars = []; test_ops = []
    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
# tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()]
Example #16
Source File: tfutil.py From interfacegan with MIT License | 6 votes |
def _create_autosummary_var(name, value_expr):
    assert not _autosummary_finalized
    v = tf.cast(value_expr, tf.float32)
    if v.shape.ndims is 0:
        v = [v, np.float32(1.0)]
    elif v.shape.ndims is 1:
        v = [tf.reduce_sum(v), tf.cast(tf.shape(v)[0], tf.float32)]
    else:
        v = [tf.reduce_sum(v), tf.reduce_prod(tf.cast(tf.shape(v), tf.float32))]
    v = tf.cond(tf.is_finite(v[0]), lambda: tf.stack(v), lambda: tf.zeros(2))
    with tf.control_dependencies(None):
        var = tf.Variable(tf.zeros(2)) # [numerator, denominator]
        update_op = tf.cond(tf.is_variable_initialized(var), lambda: tf.assign_add(var, v), lambda: tf.assign(var, v))
    if name in _autosummary_vars:
        _autosummary_vars[name].append(var)
    else:
        _autosummary_vars[name] = [var]
    return update_op

#----------------------------------------------------------------------------
# Call filewriter.add_summary() with all summaries in the default graph,
# automatically finalizing and merging them on the first call.
Example #17
Source File: utils_tf.py From cleverhans with MIT License | 6 votes |
def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #18
Source File: tfutil.py From disentangling_conditional_gans with MIT License | 6 votes |
def init_uninited_vars(vars=None):
    if vars is None: vars = tf.global_variables()
    test_vars = []; test_ops = []
    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in vars:
            assert is_tf_expression(var)
            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(':0', '/IsVariableInitialized:0'))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)
                with absolute_name_scope(var.name.split(':')[0]):
                    test_ops.append(tf.is_variable_initialized(var))
    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])

#----------------------------------------------------------------------------
# Set the values of given tf.Variables.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
# tfutil.run([tf.assign(var, value) for var, value in var_to_value_dict.items()]
Example #19
Source File: utils_tf.py From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License | 6 votes |
def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #20
Source File: utils_tf.py From robust_physical_perturbations with MIT License | 6 votes |
def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #21
Source File: utils_tf.py From robust_physical_perturbations with MIT License | 6 votes |
def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #22
Source File: block_compiler.py From fold with Apache License 2.0 | 5 votes |
def _init_uninitialized(sess):
    """Initializes all uninitialized variables and returns them as a list."""
    variables = tf.global_variables()
    if not variables:
        return []  # sess.run() barfs on empty list
    is_initialized = sess.run(
        [tf.is_variable_initialized(v) for v in variables])
    needs_init = [v for v, i in zip(variables, is_initialized) if not i]
    if not needs_init:
        return []
    sess.run(tf.variables_initializer(needs_init))
    return needs_init
Example #23
Source File: tfutil.py From interfacegan with MIT License | 5 votes |
def init_uninitialized_vars(target_vars: List[tf.Variable] = None) -> None:
    """Initialize all tf.Variables that have not already been initialized.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
    tf.variables_initializer(tf.report_uninitialized_variables()).run()
    """
    assert_tf_initialized()
    if target_vars is None:
        target_vars = tf.global_variables()

    test_vars = []
    test_ops = []

    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in target_vars:
            assert is_tf_expression(var)

            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/IsVariableInitialized:0"))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)

                with absolute_name_scope(var.name.split(":")[0]):
                    test_ops.append(tf.is_variable_initialized(var))

    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])
Example #24
Source File: model.py From nn_physical_concepts with Apache License 2.0 | 5 votes |
def initialize_uninitialized(sess):
    global_vars = tf.global_variables()
    is_not_initialized = sess.run([tf.is_variable_initialized(var) for var in global_vars])
    not_initialized_vars = [v for (v, f) in zip(global_vars, is_not_initialized) if not f]
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #25
Source File: tfutil.py From ai-platform with MIT License | 5 votes |
def init_uninitialized_vars(target_vars: List[tf.Variable] = None) -> None:
    """Initialize all tf.Variables that have not already been initialized.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
    tf.variables_initializer(tf.report_uninitialized_variables()).run()
    """
    assert_tf_initialized()
    if target_vars is None:
        target_vars = tf.global_variables()

    test_vars = []
    test_ops = []

    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in target_vars:
            assert is_tf_expression(var)

            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/IsVariableInitialized:0"))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)

                with absolute_name_scope(var.name.split(":")[0]):
                    test_ops.append(tf.is_variable_initialized(var))

    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])
Example #26
Source File: tfutil.py From higan with MIT License | 5 votes |
def init_uninitialized_vars(target_vars: List[tf.Variable] = None) -> None:
    """Initialize all tf.Variables that have not already been initialized.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
    tf.variables_initializer(tf.report_uninitialized_variables()).run()
    """
    assert_tf_initialized()
    if target_vars is None:
        target_vars = tf.global_variables()

    test_vars = []
    test_ops = []

    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in target_vars:
            assert is_tf_expression(var)

            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/IsVariableInitialized:0"))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)

                with absolute_name_scope(var.name.split(":")[0]):
                    test_ops.append(tf.is_variable_initialized(var))

    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])
Example #27
Source File: utils.py From AnoGAN-tf with MIT License | 5 votes |
def initialize_uninitialized(sess):
    global_vars = tf.global_variables()
    is_not_initialized = sess.run([tf.is_variable_initialized(var) for var in global_vars])
    not_initialized_vars = [v for (v, f) in zip(global_vars, is_not_initialized) if not f]

    print [str(i.name) for i in not_initialized_vars]
Example #28
Source File: tfutil.py From higan with MIT License | 5 votes |
def init_uninitialized_vars(target_vars: List[tf.Variable] = None) -> None:
    """Initialize all tf.Variables that have not already been initialized.

    Equivalent to the following, but more efficient and does not bloat the tf graph:
    tf.variables_initializer(tf.report_uninitialized_variables()).run()
    """
    assert_tf_initialized()
    if target_vars is None:
        target_vars = tf.global_variables()

    test_vars = []
    test_ops = []

    with tf.control_dependencies(None): # ignore surrounding control_dependencies
        for var in target_vars:
            assert is_tf_expression(var)

            try:
                tf.get_default_graph().get_tensor_by_name(var.name.replace(":0", "/IsVariableInitialized:0"))
            except KeyError:
                # Op does not exist => variable may be uninitialized.
                test_vars.append(var)

                with absolute_name_scope(var.name.split(":")[0]):
                    test_ops.append(tf.is_variable_initialized(var))

    init_vars = [var for var, inited in zip(test_vars, run(test_ops)) if not inited]
    run([var.initializer for var in init_vars])
Example #29
Source File: graph_context.py From tensorlang with Apache License 2.0 | 5 votes |
def define_local(self, name, value):
    if name in self._locals:
        raise Exception("Local already defined: %s" % name)

    should_wrap_in_var = False
    if self._wrap_locals_in_vars:
        if isinstance(value, tf.Tensor):
            should_wrap_in_var = True

        # HACK(adamb) Unwrapping in here really isn't great, since auto-unwrapping can create unexpected behavior.
        if isinstance(value, RetvalBag) and value.len() == 1:
            if isinstance(value.get(None), tf.Tensor):
                should_wrap_in_var = True
                value = value.get(None)

    if should_wrap_in_var:
        variable = state_ops.variable_op_v2(
            value.get_shape(),
            value.dtype.base_dtype)
        with tf.control_dependencies(None):
            value = tf.identity(
                tf.cond(
                    tf.is_variable_initialized(variable),
                    lambda: variable,
                    lambda: tf.assign(variable, value)
                )
            )
        print("value", value)

    self._locals[name] = value
    return value
Example #30
Source File: variable_ops_test.py From deep_image_model with Apache License 2.0 | 5 votes |
def testIsVariableInitialized(self):
    for use_gpu in [True, False]:
        with self.test_session(use_gpu=use_gpu):
            v0 = state_ops.variable_op([1, 2], tf.float32)
            self.assertEqual(False, tf.is_variable_initialized(v0).eval())
            tf.assign(v0, [[2.0, 3.0]]).eval()
            self.assertEqual(True, tf.is_variable_initialized(v0).eval())