Python tensorflow.variables_initializer() Examples
The following are 26 code examples of tensorflow.variables_initializer(), drawn from open-source projects. Each example lists the source file and project it comes from. You may also want to check out all available functions/classes of the module tensorflow.
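To set the stage, here is a minimal sketch of the basic pattern every example below builds on, assuming the TF1-style graph-and-session API (variable names are illustrative): build variables, create an initializer op for a chosen subset with tf.variables_initializer(), and run that op in a session before reading the variables.

import tensorflow as tf

w = tf.Variable(tf.zeros([2, 2]), name="w")
b = tf.Variable(tf.ones([2]), name="b")

# An op that initializes only `w`; running it leaves `b` uninitialized.
init_w = tf.variables_initializer([w], name="init_w")

with tf.Session() as sess:
    sess.run(init_w)
    print(sess.run(w))  # fine: w is initialized
    # sess.run(b) here would raise FailedPreconditionError, since b was never
    # initialized -- several of the examples below exist to handle exactly that.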
Example #1
Source File: tf_models.py From Deep-Learning-with-TensorFlow-Second-Edition with MIT License

def _stochastic_gradient_descent(self, _data):
    """
    Performs the stochastic gradient descent optimization algorithm.
    :param _data: array-like, shape = (n_samples, n_features)
    :return:
    """
    for iteration in range(1, self.n_epochs + 1):
        idx = np.random.permutation(len(_data))
        data = _data[idx]
        for batch in batch_generator(self.batch_size, data):
            if len(batch) < self.batch_size:
                # Pad with zeros
                pad = np.zeros((self.batch_size - batch.shape[0], batch.shape[1]), dtype=batch.dtype)
                batch = np.vstack((batch, pad))
            # Need to re-sample from uniform distribution
            sess.run(tf.variables_initializer(self.random_variables))
            sess.run([self.update_W, self.update_b, self.update_c],
                     feed_dict={self.visible_units_placeholder: batch})
        if self.verbose:
            error = self._compute_reconstruction_error(data)
            print(">> Epoch %d finished \tRBM Reconstruction error %f" % (iteration, error))
Example #2
Source File: attack_util.py From robust_physical_perturbations with MIT License

def build_optimization(self, regularization_losses, *args, **kwargs):
    # kwargs allow overriding beta1, beta2, and epsilon of Adam;
    # they are passed as-is to Adam's initialization.
    assert self.adv_pred is not None, "build_model must be called before build_optimization"
    assert self.output_shape is not None

    self.attack_target = tf.placeholder(tf.float32, shape=self.output_shape, name="attack_target")
    self.learning_rate = tf.placeholder(tf.float32, shape=(), name="learning_rate")
    self.reg_names = regularization_losses
    self.total_loss = self._build_loss(regularization_losses, self.adv_pred, self.attack_target, self.noise)

    with tf.name_scope("adamoptimizer"):
        self.optimization_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate, **kwargs) \
            .minimize(self.total_loss, var_list=[self.noise])

    self.init_adam = tf.variables_initializer(
        filter(lambda x: "adam" in x.name.lower(), tf.global_variables()))
    self.init_noise = tf.variables_initializer(set(tf.global_variables()) - set(self.model_vars))

    return self.optimization_op
Example #3
Source File: running_mean_std.py From Reinforcement_Learning_for_Traffic_Light_Control with Apache License 2.0

def __init__(self, epsilon=1e-4, shape=(), scope=''):
    sess = get_session()

    self._new_mean = tf.placeholder(shape=shape, dtype=tf.float64)
    self._new_var = tf.placeholder(shape=shape, dtype=tf.float64)
    self._new_count = tf.placeholder(shape=(), dtype=tf.float64)

    with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
        self._mean = tf.get_variable('mean', initializer=np.zeros(shape, 'float64'), dtype=tf.float64)
        self._var = tf.get_variable('std', initializer=np.ones(shape, 'float64'), dtype=tf.float64)
        self._count = tf.get_variable('count', initializer=np.full((), epsilon, 'float64'), dtype=tf.float64)

    self.update_ops = tf.group([
        self._var.assign(self._new_var),
        self._mean.assign(self._new_mean),
        self._count.assign(self._new_count)
    ])

    sess.run(tf.variables_initializer([self._mean, self._var, self._count]))
    self.sess = sess
    self._set_mean_var_count()
Example #4
Source File: tf_util.py From stable-baselines with MIT License

def initialize(sess=None):
    """
    Initialize all the uninitialized variables in the global scope.

    :param sess: (TensorFlow Session)
    """
    if sess is None:
        sess = tf.get_default_session()
    new_variables = set(tf.global_variables()) - ALREADY_INITIALIZED
    sess.run(tf.variables_initializer(new_variables))
    ALREADY_INITIALIZED.update(new_variables)

# ================================================================
# Theano-like Function
# ================================================================
Example #5
Source File: Trainer.py From MOTSFusion with MIT License

def create_optimizer(self, config):
    momentum = config.float("momentum", 0.9)
    if self.opt_str == "sgd_nesterov":
        return tf.train.MomentumOptimizer(self.lr_var, momentum, use_nesterov=True), None
    elif self.opt_str == "sgd_momentum":
        return tf.train.MomentumOptimizer(self.lr_var, momentum), None
    elif self.opt_str == "sgd":
        return tf.train.GradientDescentOptimizer(self.lr_var), None
    elif self.opt_str == "adam":
        opt = tf.train.AdamOptimizer(self.lr_var)
        all_vars = tf.global_variables()
        opt_vars = [v for v in all_vars if "Adam" in v.name]
        reset_opt_op = tf.variables_initializer(opt_vars, "reset_optimizer")
        return opt, reset_opt_op
    elif self.opt_str == "none":
        return None, None
    else:
        assert False, ("unknown optimizer", self.opt_str)
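The "adam" branch above returns a reset_opt_op alongside the optimizer: because tf.train.AdamOptimizer keeps its moment estimates in slot variables whose names contain "Adam", re-running their initializers clears the optimizer state without touching the model weights. A minimal sketch of how such a reset op might be used (the loss and the "beta" name filter for Adam's power accumulators are illustrative assumptions; slot naming can vary across TF versions):

import tensorflow as tf

x = tf.Variable(3.0, name="x")
loss = tf.square(x)
opt = tf.train.AdamOptimizer(learning_rate=0.1)
train_op = opt.minimize(loss)

# Adam's slots (m, v) contain "Adam" in their names; the beta1/beta2 power
# accumulators contain "beta" (an assumption that may vary by TF version).
opt_vars = [v for v in tf.global_variables()
            if "Adam" in v.name or "beta" in v.name]
reset_opt_op = tf.variables_initializer(opt_vars, name="reset_optimizer")

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(10):
        sess.run(train_op)
    sess.run(reset_opt_op)  # optimizer state cleared; x keeps its trained value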
Example #6
Source File: running_mean_std.py From HardRLWithYoutube with MIT License

def __init__(self, epsilon=1e-4, shape=(), scope=''):
    sess = get_session()

    self._new_mean = tf.placeholder(shape=shape, dtype=tf.float64)
    self._new_var = tf.placeholder(shape=shape, dtype=tf.float64)
    self._new_count = tf.placeholder(shape=(), dtype=tf.float64)

    with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
        self._mean = tf.get_variable('mean', initializer=np.zeros(shape, 'float64'), dtype=tf.float64)
        self._var = tf.get_variable('std', initializer=np.ones(shape, 'float64'), dtype=tf.float64)
        self._count = tf.get_variable('count', initializer=np.full((), epsilon, 'float64'), dtype=tf.float64)

    self.update_ops = tf.group([
        self._var.assign(self._new_var),
        self._mean.assign(self._new_mean),
        self._count.assign(self._new_count)
    ])

    sess.run(tf.variables_initializer([self._mean, self._var, self._count]))
    self.sess = sess
    self._set_mean_var_count()
Example #7
Source File: dqn_utils.py From cs294-112_hws with MIT License

def initialize_interdependent_variables(session, vars_list, feed_dict):
    """Initialize a list of variables one at a time, which is useful if
    initialization of some variables depends on initialization of the others.
    """
    vars_left = vars_list
    while len(vars_left) > 0:
        new_vars_left = []
        for v in vars_left:
            try:
                # If using an older version of TensorFlow, uncomment the line
                # below and comment out the line after it.
                #session.run(tf.initialize_variables([v]), feed_dict)
                session.run(tf.variables_initializer([v]), feed_dict)
            except tf.errors.FailedPreconditionError:
                new_vars_left.append(v)
        if len(new_vars_left) >= len(vars_left):
            # This can happen if the variables all depend on each other, or more likely if there's
            # another variable outside of the list that still needs to be initialized. This could be
            # detected here, but life's finite.
            raise Exception("Cycle in variable dependencies, or external precondition unsatisfied.")
        else:
            vars_left = new_vars_left
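A concrete case the helper above handles: a variable whose initial value is computed from another variable. Running b.initializer before a.initializer raises FailedPreconditionError, which the retry loop catches until a working order is found. A small sketch, with illustrative names, that uses the helper defined above:

import tensorflow as tf

a = tf.Variable(tf.ones([3]), name="a")
b = tf.Variable(a * 2.0, name="b")  # b's initial value reads a

with tf.Session() as sess:
    # Passed in the "wrong" order on purpose; the helper retries b after a.
    initialize_interdependent_variables(sess, [b, a], feed_dict=None)
    print(sess.run(b))  # [2. 2. 2.]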
Example #8
Source File: tfutil.py From disentangling_conditional_gans with MIT License

def create_session(config_dict=dict(), force_as_default=False):
    config = tf.ConfigProto()
    for key, value in config_dict.items():
        fields = key.split('.')
        obj = config
        for field in fields[:-1]:
            obj = getattr(obj, field)
        setattr(obj, fields[-1], value)
    session = tf.Session(config=config)
    if force_as_default:
        session._default_session = session.as_default()
        session._default_session.enforce_nesting = False
        session._default_session.__enter__()
    return session

#----------------------------------------------------------------------------
# Initialize all tf.Variables that have not already been initialized.
# Equivalent to the following, but more efficient and does not bloat the tf graph:
# tf.variables_initializer(tf.report_uninitialized_variables()).run()
Example #9
Source File: utils_tf.py From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License

def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #10
Source File: saver.py From imgcomp-cvpr with GNU General Public License v3.0

def __init__(self, ckpt_dir, **kwargs_saver):
    """
    :param ckpt_dir: where to save data
    :param kwargs_saver: Passed on to the tf.train.Saver that will be created
    """
    os.makedirs(ckpt_dir, exist_ok=True)
    self.ckpt_dir = ckpt_dir
    self.ckpt_base_file_path = path.join(ckpt_dir, _CKPT_FN)

    all_saveable_vars = tf_helpers.all_saveable_objects()
    var_list = kwargs_saver.get('var_list', all_saveable_vars)
    var_names = VarNames(ckpt_dir)
    if not var_names.exists():
        print('Saver for {} saves {} variables...'.format(self.ckpt_dir, len(var_list)))
        var_names.write([v.name for v in var_list])

    unrestored_vars = [v for v in all_saveable_vars if v not in var_list]
    if unrestored_vars:
        print('Found {} unrestored variables'.format(len(unrestored_vars)))
    self.init_unrestored_op = (tf.variables_initializer(unrestored_vars)
                               if unrestored_vars else tf.no_op())

    self.saver = tf.train.Saver(**kwargs_saver)
Example #11
Source File: trainer.py From dvae with Apache License 2.0

def _initialize_metrics(self):
    """ Initialize the model metrics """
    self.metrics = {}
    self.metric_values = {}
    self.update_metrics = {}
    self.reset_metrics = {}
    for data_scope in (Data.TRAIN, Data.VALIDATE, Data.TEST):
        metrics = self.collect_metrics(data_scope)
        self.metrics[data_scope] = metrics
        self.metric_values[data_scope] = {
            name: metric['scalar'] for name, metric in iteritems(metrics)}
        self.update_metrics[data_scope] = [
            metric['update_op'] for metric in itervalues(metrics)]

        metric_variables = []
        with stats_utils.metric_scope(data_scope, graph=self.graph) as scope:
            for local in tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES, scope):
                metric_variables.append(local)
        self.reset_metrics[data_scope] = tf.variables_initializer(metric_variables)
Example #12
Source File: utils_tf.py From robust_physical_perturbations with MIT License

def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
Example #13
Source File: variables.py From lambda-packs with MIT License

def variables_initializer(var_list, name="init"):
    """Returns an Op that initializes a list of variables.

    After you launch the graph in a session, you can run the returned Op to
    initialize all the variables in `var_list`. This Op runs all the
    initializers of the variables in `var_list` in parallel.

    Calling `initialize_variables()` is equivalent to passing the list of
    initializers to `Group()`.

    If `var_list` is empty, however, the function still returns an Op that can
    be run. That Op just has no effect.

    Args:
      var_list: List of `Variable` objects to initialize.
      name: Optional name for the returned operation.

    Returns:
      An Op that runs the initializers of all the specified variables.
    """
    if var_list:
        return control_flow_ops.group(*[v.initializer for v in var_list], name=name)
    return control_flow_ops.no_op(name=name)
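As the docstring notes, an empty var_list still produces a runnable (no-effect) op, so the function can be called unconditionally on a possibly-empty list. A short illustration of both branches, with illustrative names:

import tensorflow as tf

v = tf.Variable(0, name="v")

init_some = tf.variables_initializer([v])  # groups v.initializer
init_none = tf.variables_initializer([])   # still a runnable op; does nothing

with tf.Session() as sess:
    sess.run(init_none)  # no effect, no error
    sess.run(init_some)
    print(sess.run(v))   # 0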
Example #14
Source File: tf_models.py From Deep-Learning-with-TensorFlow-Second-Edition with MIT License

def fit(self, X):
    """
    Fit a model given data.
    :param X: array-like, shape = (n_samples, n_features)
    :return:
    """
    self.n_visible_units = X.shape[1]

    # Initialize RBM parameters
    self._build_model()

    sess.run(tf.variables_initializer([self.W, self.c, self.b]))

    if self.optimization_algorithm == 'sgd':
        self._stochastic_gradient_descent(X)
    else:
        raise ValueError("Invalid optimization algorithm.")
    return
Example #15
Source File: running_mean_std.py From ICML2019-TREX with MIT License

def __init__(self, epsilon=1e-4, shape=(), scope=''):
    sess = get_session()

    self._new_mean = tf.placeholder(shape=shape, dtype=tf.float64)
    self._new_var = tf.placeholder(shape=shape, dtype=tf.float64)
    self._new_count = tf.placeholder(shape=(), dtype=tf.float64)

    with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
        self._mean = tf.get_variable('mean', initializer=np.zeros(shape, 'float64'), dtype=tf.float64)
        self._var = tf.get_variable('std', initializer=np.ones(shape, 'float64'), dtype=tf.float64)
        self._count = tf.get_variable('count', initializer=np.full((), epsilon, 'float64'), dtype=tf.float64)

    self.update_ops = tf.group([
        self._var.assign(self._new_var),
        self._mean.assign(self._new_mean),
        self._count.assign(self._new_count)
    ])

    sess.run(tf.variables_initializer([self._mean, self._var, self._count]))
    self.sess = sess
    self._set_mean_var_count()
Example #16
Source File: test_normalization.py From inferbeddings with MIT License

def test_normalization():
    embedding_initializer = tf.contrib.layers.xavier_initializer()
    embedding_layer = tf.get_variable('embeddings', shape=[1024, 100],
                                      initializer=embedding_initializer)
    unit_sphere_embeddings = constraints.unit_sphere(embedding_layer, norm=1.0)

    init_op = tf.variables_initializer([embedding_layer])

    with tf.Session() as session:
        for _ in range(256):
            session.run(init_op)
            embeddings = session.run(embedding_layer)

            # Use TensorFlow for normalizing the embeddings
            session.run(unit_sphere_embeddings)
            normalized_v1 = session.run(embedding_layer)

            # Use NumPy for normalizing the embeddings
            normalized_v2 = embeddings / np.linalg.norm(embeddings, axis=1).reshape((-1, 1))

            np.testing.assert_allclose(normalized_v1, normalized_v2, rtol=1e-6)
Example #17
Source File: variables.py From auto-alt-text-lambda-api with MIT License

def variables_initializer(var_list, name="init"):
    """Returns an Op that initializes a list of variables.

    After you launch the graph in a session, you can run the returned Op to
    initialize all the variables in `var_list`. This Op runs all the
    initializers of the variables in `var_list` in parallel.

    Calling `initialize_variables()` is equivalent to passing the list of
    initializers to `Group()`.

    If `var_list` is empty, however, the function still returns an Op that can
    be run. That Op just has no effect.

    Args:
      var_list: List of `Variable` objects to initialize.
      name: Optional name for the returned operation.

    Returns:
      An Op that runs the initializers of all the specified variables.
    """
    if var_list:
        return control_flow_ops.group(*[v.initializer for v in var_list], name=name)
    return control_flow_ops.no_op(name=name)
Example #18
Source File: dqn_utils.py From rl_algorithms with MIT License

def initialize_interdependent_variables(session, vars_list, feed_dict):
    """Initialize a list of variables one at a time, which is useful if
    initialization of some variables depends on initialization of the others.
    """
    vars_left = vars_list
    while len(vars_left) > 0:
        new_vars_left = []
        for v in vars_left:
            try:
                # If using an older version of TensorFlow, uncomment the line
                # below and comment out the line after it.
                #session.run(tf.initialize_variables([v]), feed_dict)
                session.run(tf.variables_initializer([v]), feed_dict)
            except tf.errors.FailedPreconditionError:
                new_vars_left.append(v)
        if len(new_vars_left) >= len(vars_left):
            # This can happen if the variables all depend on each other, or more likely if there's
            # another variable outside of the list that still needs to be initialized. This could be
            # detected here, but life's finite.
            raise Exception("Cycle in variable dependencies, or external precondition unsatisfied.")
        else:
            vars_left = new_vars_left
Example #19
Source File: test_core_utils.py From lm-human-preferences with MIT License

def test_sample_buffer():
    capacity = 100
    batch = 17
    lots = 100
    with tf.Graph().as_default(), tf.Session() as sess:
        buffer = utils.SampleBuffer(capacity=capacity, schemas=dict(x=utils.Schema(tf.int32, ())))
        tf.variables_initializer(tf.global_variables() + tf.local_variables()).run()
        i_p = tf.placeholder(dtype=tf.int32, shape=())
        add = buffer.add(x=batch * i_p + tf.range(batch))
        sample = buffer.sample(lots, seed=7)['x']
        all_data_1 = buffer.data()
        all_data_2 = buffer.read(tf.range(buffer.size()))
        for i in range(20):
            add.run(feed_dict={i_p: i})
            samples = sample.eval()
            hi = batch * (i + 1)
            lo = max(0, hi - capacity)
            assert lo <= samples.min() <= lo + 3
            assert hi - 5 <= samples.max() < hi
            np.testing.assert_equal(sess.run(all_data_1), sess.run(all_data_2))
Example #20
Source File: trainer.py From dvae with Apache License 2.0

def _init_variables(self):
    """ Create the initialization operation for the variables """
    # Adam optimizer uses two variables that can only be accessed through the use of a protected
    # function since the variables aren't scoped in any way. Trying to add a tf.variable_scope
    # around apply_gradients where the variables are created did not help.
    var_list = set(self.optimizer._get_beta_accumulators())  # pylint: disable=protected-access

    slot_names = self.optimizer.get_slot_names()
    for tower in self.towers:
        variables = tower.global_variables
        var_list.update(variables)
        for slot_name in slot_names:
            for variable in variables:
                slot = self.optimizer.get_slot(variable, slot_name)
                if slot is not None:
                    var_list.add(slot)

    # Initialize all the variables
    self.initialization_operation = tf.group(
        tf.variables_initializer(var_list),
        # Apparently local variables are not part of 'all' variables... go figure.
        # This is needed for metrics, for example.
        tf.local_variables_initializer())
Example #21
Source File: model.py From object-detection with MIT License

def yolo_non_max_suppression(scores, boxes, classes, max_boxes=10, iou_threshold=0.5):
    max_boxes_tensor = K.variable(max_boxes, dtype='int32')  # tensor to be used in tf.image.non_max_suppression()
    K.get_session().run(tf.variables_initializer([max_boxes_tensor]))  # initialize variable max_boxes_tensor

    # Use tf.image.non_max_suppression() to get the list of indices corresponding to boxes you keep
    nms_indices = tf.image.non_max_suppression(boxes, scores, max_boxes, iou_threshold)

    # Use K.gather() to select only nms_indices from scores, boxes and classes
    scores = K.gather(scores, nms_indices)
    boxes = K.gather(boxes, nms_indices)
    classes = K.gather(classes, nms_indices)

    return scores, boxes, classes
Example #22
Source File: tf_models.py From Deep-Learning-with-TensorFlow-Second-Edition with MIT License

def from_dict(cls, dct_to_load):
    weights = {var_name: dct_to_load.pop(var_name)
               for var_name in cls._get_weight_variables_names()}

    _activation_function_class = dct_to_load.pop('_activation_function_class')
    n_visible_units = dct_to_load.pop('n_visible_units')

    instance = cls(**dct_to_load)

    setattr(instance, '_activation_function_class', _activation_function_class)
    setattr(instance, 'n_visible_units', n_visible_units)

    # Initialize RBM parameters
    instance._build_model(weights)
    sess.run(tf.variables_initializer(
        [getattr(instance, name) for name in cls._get_weight_variables_names()]))
    return instance
Example #23
Source File: train_val.py From tf_ctpn with MIT License

def initialize(self, sess):
    # Initial file lists are empty
    np_paths = []
    ss_paths = []
    variables = tf.global_variables()

    # Initialize all variables first
    sess.run(tf.variables_initializer(variables, name='init'))

    if self.pretrained_model is not None:
        if self.pretrained_model.endswith('.ckpt'):
            # Fresh train directly from ImageNet weights
            print('Loading initial model weights from {:s}'.format(self.pretrained_model))
            var_keep_dic = self.get_variables_in_checkpoint_file(self.pretrained_model)
            # Get the variables to restore, ignoring the variables to fix
            variables_to_restore = self.net.get_variables_to_restore(variables, var_keep_dic)
            restorer = tf.train.Saver(variables_to_restore)
            restorer.restore(sess, self.pretrained_model)
            print('Loaded.')
        else:
            # Restore from checkpoint and meta file
            self.restore_ckpt_from_dir(sess, self.net, self.pretrained_model)
            print('Loaded.')

    last_snapshot_iter = 0
    rate = cfg.TRAIN.LEARNING_RATE
    stepsizes = list(cfg.TRAIN.STEPSIZE)

    return rate, last_snapshot_iter, stepsizes, np_paths, ss_paths
Example #24
Source File: _model.py From tensorfx with Apache License 2.0

def build_init(self):
    """Builds the initialization sub-graph.

    The default implementation creates one op that initializes all global
    variables (used for initialization), and another that initializes all
    non-trainable variables, local variables, and tables (used for local
    initialization). Initialization is run when the graph is first created,
    before training. Local initialization is performed after a previously
    trained model is loaded.

    Returns:
      A tuple containing the init op and local init op to use to initialize
      the graph.
    """
    init_op = tf.variables_initializer(tf.global_variables(), name='init')

    # For some reason not all local variables are in the local variables collection, but some are in
    # the global variables collection (such as those set up by reader ops).
    # So in addition to initializing local variables in the local_init_op, we also initialize the
    # set of variables in the global variables that are not trainable.
    # Just to add to the mix, tables are neither, and so must be explicitly included as well.
    # All of these will be initialized after restoring from a checkpoint.
    variables = tf.global_variables()
    for trainable in tf.trainable_variables():
        variables.remove(trainable)

    local_init_op = tf.group(tf.variables_initializer(variables),
                             tf.variables_initializer(tf.local_variables()),
                             tf.tables_initializer(),
                             name='local_init_op')

    # Add the local initialization op to the main op collection, which is looked up at model loading
    # time, and is automatically invoked after it has been loaded.
    tf.add_to_collection('saved_model_main_op', local_init_op)

    return init_op, local_init_op
Example #25
Source File: problem_generator.py From yolo_v2 with Apache License 2.0

def make_rosenbrock_loss_and_init(device=None):
    """A variable-backed version of the Rosenbrock problem.

    See the Rosenbrock class for details.

    Args:
      device: Where to place the ops of this problem.

    Returns:
      A tuple of two callables, the first of which creates the loss and the
      second of which creates the parameter initializer function.
    """
    def make_rosenbrock_loss():
        with tf.name_scope("optimizee"):
            with tf.device(device):
                x = tf.get_variable("x", [1])
                y = tf.get_variable("y", [1])
                c = tf.get_variable(
                    "c", [1],
                    initializer=tf.constant_initializer(100.0),
                    trainable=False)
                obj = (1 - x)**2 + c * (y - x**2)**2
            return tf.squeeze(obj)

    def make_init_fn(parameters):
        with tf.device(device):
            init_op = tf.variables_initializer(parameters)

        def init_fn(sess):
            tf.logging.info("Initializing model parameters.")
            sess.run(init_op)

        return init_fn

    return make_rosenbrock_loss, make_init_fn
Example #26
Source File: keras_yolo.py From object-detection with MIT License

def yolo_eval(yolo_outputs,
              image_shape,
              max_boxes=10,
              score_threshold=.6,
              iou_threshold=.5):
    """Evaluate YOLO model on given input batch and return filtered boxes."""
    box_confidence, box_xy, box_wh, box_class_probs = yolo_outputs
    boxes = yolo_boxes_to_corners(box_xy, box_wh)
    boxes, scores, classes = yolo_filter_boxes(
        box_confidence, boxes, box_class_probs, threshold=score_threshold)

    # Scale boxes back to original image shape.
    height = image_shape[0]
    width = image_shape[1]
    image_dims = K.stack([height, width, height, width])
    image_dims = K.reshape(image_dims, [1, 4])
    boxes = boxes * image_dims

    # TODO: Something must be done about this ugly hack!
    max_boxes_tensor = K.variable(max_boxes, dtype='int32')
    K.get_session().run(tf.variables_initializer([max_boxes_tensor]))
    nms_index = tf.image.non_max_suppression(
        boxes, scores, max_boxes_tensor, iou_threshold=iou_threshold)
    boxes = K.gather(boxes, nms_index)
    scores = K.gather(scores, nms_index)
    classes = K.gather(classes, nms_index)
    return boxes, scores, classes