Python tensorflow.python.ops.variables.global_variables() Examples

The following are 30 code examples of tensorflow.python.ops.variables.global_variables(). Each example is taken from an open-source project; the source file, project, and license are noted above each snippet. You may also want to look at the other functions and classes available in the tensorflow.python.ops.variables module.
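
Before the project examples, here is a minimal standalone sketch of what global_variables() returns: every variable created with the default collections ends up in the GLOBAL_VARIABLES collection, while a variable placed only in LOCAL_VARIABLES does not appear. The snippet uses the public tf.compat.v1 aliases rather than the internal tensorflow.python.ops.variables module, so treat it as an illustrative assumption rather than code taken from any of the projects below.

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

with tf.Graph().as_default():
  w = tf.get_variable('w', shape=[2, 3])  # added to GLOBAL_VARIABLES by default
  b = tf.get_variable('b', shape=[3])     # added to GLOBAL_VARIABLES by default
  c = tf.get_variable(
      'counter', shape=[], dtype=tf.int32,
      initializer=tf.zeros_initializer(),
      collections=[tf.GraphKeys.LOCAL_VARIABLES])  # local only, not returned below

  print([v.op.name for v in tf.global_variables()])  # ['w', 'b']

  with tf.Session() as sess:
    # variables_initializer() pairs naturally with global_variables().
    sess.run(tf.variables_initializer(tf.global_variables()))
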
Example #1
Source File: backend.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _initialize_variables(session):
  """Utility to initialize uninitialized variables on the fly."""
  variables = variables_module.global_variables()
  candidate_vars = []
  for v in variables:
    if not getattr(v, '_keras_initialized', False):
      candidate_vars.append(v)
  # This step is expensive, so we only run it on variables not already
  # marked as initialized.
  is_initialized = session.run(
      [variables_module.is_variable_initialized(v) for v in candidate_vars])
  uninitialized_vars = []
  for flag, v in zip(is_initialized, candidate_vars):
    if not flag:
      uninitialized_vars.append(v)
    v._keras_initialized = True
  if uninitialized_vars:
    session.run(variables_module.variables_initializer(uninitialized_vars)) 
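
The Keras backend helper above illustrates a pattern that recurs in these examples: probe the initialization state of every candidate variable in a single session.run, then initialize only the ones that report uninitialized. Below is a self-contained sketch of the same idea using only public tf.compat.v1 names; the helper and variable names are assumptions for illustration, not part of backend.py.

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

def init_uninitialized(session):
  """Initialize only the global variables that are not yet initialized."""
  candidates = tf.global_variables()
  # One session.run probes the state of every variable at once.
  flags = session.run([tf.is_variable_initialized(v) for v in candidates])
  pending = [v for v, ok in zip(candidates, flags) if not ok]
  if pending:
    session.run(tf.variables_initializer(pending))

with tf.Graph().as_default(), tf.Session() as sess:
  a = tf.get_variable('a', shape=[2], initializer=tf.ones_initializer())
  sess.run(a.initializer)      # 'a' is initialized by hand
  b = tf.get_variable('b', shape=[2], initializer=tf.zeros_initializer())
  init_uninitialized(sess)     # only 'b' is initialized here
  print(sess.run([a, b]))
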
Example #2
Source File: export.py    From deep_image_model with Apache License 2.0
def _get_saver():
  """Lazy init and return saver."""
  saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
  if saver is not None:
    if saver:
      saver = saver[0]
    else:
      saver = None
  if saver is None and variables.global_variables():
    saver = tf_saver.Saver(write_version=saver_pb2.SaverDef.V1)
    ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
  return saver 
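
Several of the later examples (export.py, graph_actions.py, supervisor.py) share this lazy pattern: reuse a Saver already registered in the SAVERS collection, and otherwise build one only if global_variables() is non-empty, because a Saver constructed with no variables to save raises an error. A hedged standalone version using public tf.compat.v1 names follows; the helper name is an assumption.

import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

def get_or_create_saver():
  """Return the first Saver in the SAVERS collection, creating one lazily."""
  savers = tf.get_collection(tf.GraphKeys.SAVERS)
  if savers:
    return savers[0]
  # Only build a Saver when there is at least one variable to save.
  if tf.global_variables():
    saver = tf.train.Saver()
    tf.add_to_collection(tf.GraphKeys.SAVERS, saver)
    return saver
  return None

with tf.Graph().as_default():
  print(get_or_create_saver())           # None: no variables exist yet
  tf.get_variable('w', shape=[1])
  saver = get_or_create_saver()           # builds and registers a Saver
  assert saver is get_or_create_saver()   # later calls reuse the collection entry
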
Example #3
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testLocalVariableNotInAllVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.local_variable(0)
        self.assertNotIn(a, variables_lib.global_variables())
        self.assertIn(a, variables_lib.local_variables()) 
Example #4
Source File: variables_test.py    From tf-slim with Apache License 2.0
def test_global_variable(self):
    with self.cached_session() as sess:
      self.assertEqual([], variables_lib.global_variables())
      value0 = 42
      variables_lib2.global_variable(value0)
      value1 = 43
      variables_lib2.global_variable(value1)
      variables = variables_lib.global_variables()
      self.assertEqual(2, len(variables))
      with self.assertRaises(errors_impl.FailedPreconditionError):
        sess.run(variables)
      variables_lib.variables_initializer(variables).run()
      self.assertAllEqual(set([value0, value1]), set(sess.run(variables))) 
Example #5
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testVariableNameAndShape(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.global_variable([1, 1, 1, 1, 1], name='a')
        self.assertEqual(a.op.name, 'A/a')
        self.assertListEqual(a.get_shape().as_list(), [5])
        self.assertListEqual([a], variables_lib.global_variables()) 
Example #6
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testGlobalVariableNotInLocalVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.global_variable(0)
        self.assertNotIn(a, variables_lib.local_variables())
        self.assertIn(a, variables_lib.global_variables()) 
Example #7
Source File: variables_test.py    From tf-slim with Apache License 2.0
def testNotInLocalVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.model_variable('a', [5])
        self.assertIn(a, variables_lib.global_variables())
        self.assertIn(a, ops.get_collection(ops.GraphKeys.MODEL_VARIABLES))
        self.assertNotIn(a, variables_lib.local_variables()) 
Example #8
Source File: optimizers_test.py    From tf-slim with Apache License 2.0
def testAverages(self):
    with self.cached_session() as session:
      scale = 2.
      grad = array_ops.ones([3, 4]) * scale
      log_norm = np.log(np.sqrt(scale**2 * grad.get_shape().num_elements()))
      grads_and_vars = [(grad, grad)]
      grads_and_vars = optimizers_lib.adaptive_clipping_fn(
          decay=0.5)(grads_and_vars)

      var_dict = {}
      for var in variables.global_variables():
        if var.name.startswith("AdaptiveMaxNorm"):
          var_dict[var.name.split(":")[0]] = var
      self.assertEqual(2, len(var_dict))
      moving_mean = var_dict["AdaptiveMaxNorm/mean"]
      moving_sq_mean = var_dict["AdaptiveMaxNorm/sq_mean"]
      variables.global_variables_initializer().run()
      mean, sq_mean = session.run([moving_mean, moving_sq_mean])
      self.assertEqual([0], mean)
      self.assertEqual([0], sq_mean)
      for i in range(20):
        mean, sq_mean, _ = session.run(
            [moving_mean, moving_sq_mean, grads_and_vars[0][0]])
        if i == 0:
          self.assertLess(mean, 0.9 * log_norm)
          self.assertLess(sq_mean, 0.9 * log_norm**2)

      self.assertAlmostEqual(float(mean), log_norm, places=4)
      self.assertAlmostEqual(float(sq_mean), log_norm**2, places=4) 
Example #9
Source File: supervisor.py    From ctw-baseline with MIT License
def _init_saver(self, saver=USE_DEFAULT):
    """Initializes saver.

    Args:
      saver: A `Saver` object. If set to USE_DEFAULT, create one that
        saves all the variables.
    """
    if saver is Supervisor.USE_DEFAULT:
      saver = self._get_first_op_from_collection(ops.GraphKeys.SAVERS)
      if saver is None and variables.global_variables():
        saver = saver_mod.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    self._saver = saver 
Example #10
Source File: learning_test.py    From tf-slim with Apache License 2.0
def testTrainWithInitFromFn(self):
    logdir1 = tempfile.mkdtemp('tmp_logs1')
    logdir2 = tempfile.mkdtemp('tmp_logs2')

    # First, train the model one step (make sure the error is high).
    with ops.Graph().as_default():
      random_seed.set_random_seed(0)
      train_op = self.create_train_op()
      loss = learning.train(train_op, logdir1, number_of_steps=1)
      self.assertGreater(loss, .5)

    # Next, train the model to convergence.
    with ops.Graph().as_default():
      random_seed.set_random_seed(1)
      train_op = self.create_train_op()
      loss = learning.train(
          train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
      self.assertIsNotNone(loss)
      self.assertLess(loss, .015)

    # Finally, advance the model a single step and validate that the loss is
    # still low.
    with ops.Graph().as_default():
      random_seed.set_random_seed(2)
      train_op = self.create_train_op()

      model_variables = variables_lib.global_variables()
      model_path = os.path.join(logdir1, 'model.ckpt-300')
      saver = saver_lib.Saver(model_variables)

      def RestoreFn(sess):
        saver.restore(sess, model_path)

      loss = learning.train(
          train_op, logdir2, number_of_steps=1, init_fn=RestoreFn)

      self.assertIsNotNone(loss)
      self.assertLess(loss, .015) 
Example #11
Source File: supervisor.py    From deep_image_model with Apache License 2.0
def _init_saver(self, saver=USE_DEFAULT):
    """Initializes saver.

    Args:
      saver: A `Saver` object. If set to USE_DEFAULT, create one that
        saves all the variables.
    """
    if saver is Supervisor.USE_DEFAULT:
      saver = self._get_first_op_from_collection(ops.GraphKeys.SAVERS)
      if saver is None and variables.global_variables():
        saver = saver_mod.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    self._saver = saver 
Example #12
Source File: rev_block_lib_test.py    From tf-slim with Apache License 2.0
def testReuse(self):

    def f(x):
      return core_layers.dense(x, self.CHANNELS // 2)

    def g(x):
      return core_layers.dense(x, self.CHANNELS // 2)

    x = random_ops.random_uniform(
        [self.BATCH_SIZE, self.CHANNELS], dtype=dtypes.float32)
    x1, x2 = array_ops.split(x, 2, axis=-1)

    with variable_scope.variable_scope("test"):
      y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)

    num_vars_before = len(variables.global_variables())

    with variable_scope.variable_scope("test", reuse=True):
      y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)

    num_vars_after = len(variables.global_variables())
    self.assertEqual(num_vars_before, num_vars_after)

    loss = math_ops.reduce_mean(y1 + y2)
    _ = gradients_impl.gradients(loss,
                                 [x] + variables.trainable_variables())

    with variable_scope.variable_scope("test", reuse=True):
      y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)

    num_vars_after = len(variables.global_variables())
    self.assertEqual(num_vars_before, num_vars_after) 
Example #13
Source File: stochastic_weight_averaging.py    From swa-tf with BSD 2-Clause "Simplified" License
def variables_to_restore(self, moving_avg_variables=None):
        """Returns a map of names to `Variables` to restore.
        If a variable has a moving average, use the moving average variable name as
        the restore name; otherwise, use the variable name.
        For example,
        ```python
          variables_to_restore = ema.variables_to_restore()
          saver = tf.train.Saver(variables_to_restore)
        ```
        Below is an example of such mapping:
        ```
          conv/batchnorm/gamma/ExponentialMovingAverage: conv/batchnorm/gamma,
          conv_4/conv2d_params/ExponentialMovingAverage: conv_4/conv2d_params,
          global_step: global_step
        ```
        Args:
          moving_avg_variables: a list of variables that require the moving
            average variable name to be used when restoring. If None, defaults to
            variables.moving_average_variables() + variables.trainable_variables().
        Returns:
          A map from restore_names to variables. The restore_name can be the
          moving_average version of the variable name if it exists, or the original
          variable name.
        """
        name_map = {}
        if moving_avg_variables is None:
            # Include trainable variables and variables which have been explicitly
            # added to the moving_average_variables collection.
            moving_avg_variables = variables.trainable_variables()
            moving_avg_variables += variables.moving_average_variables()
        # Remove duplicates
        moving_avg_variables = set(moving_avg_variables)
        # Collect all the variables with moving average,
        for v in moving_avg_variables:
            name_map[self.average_name(v)] = v
        # Make sure we restore variables without moving averages as well.
        moving_avg_variable_names = set([v.name for v in moving_avg_variables])
        for v in list(set(variables.global_variables())):
            if v.name not in moving_avg_variable_names and v.op.name not in name_map:
                name_map[v.op.name] = v
        return name_map 
Example #14
Source File: supervisor.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _init_saver(self, saver=USE_DEFAULT):
    """Initializes saver.

    Args:
      saver: A `Saver` object. If set to USE_DEFAULT, create one that
        saves all the variables.
    """
    if saver is Supervisor.USE_DEFAULT:
      saver = self._get_first_op_from_collection(ops.GraphKeys.SAVERS)
      if saver is None and variables.global_variables():
        saver = saver_mod.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    self._saver = saver 
Example #15
Source File: supervisor.py    From keras-lambda with MIT License
def _init_saver(self, saver=USE_DEFAULT):
    """Initializes saver.

    Args:
      saver: A `Saver` object. If set to USE_DEFAULT, create one that
        saves all the variables.
    """
    if saver is Supervisor.USE_DEFAULT:
      saver = self._get_first_op_from_collection(ops.GraphKeys.SAVERS)
      if saver is None and variables.global_variables():
        saver = saver_mod.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    self._saver = saver 
Example #16
Source File: learning_test.py    From keras-lambda with MIT License
def testTrainWithInitFromFn(self):
    logdir1 = os.path.join(
        tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs1')
    logdir2 = os.path.join(
        tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs2')

    # First, train the model one step (make sure the error is high).
    with ops.Graph().as_default():
      random_seed.set_random_seed(0)
      train_op = self.create_train_op()
      loss = learning.train(train_op, logdir1, number_of_steps=1)
      self.assertGreater(loss, .5)

    # Next, train the model to convergence.
    with ops.Graph().as_default():
      random_seed.set_random_seed(1)
      train_op = self.create_train_op()
      loss = learning.train(
          train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
      self.assertIsNotNone(loss)
      self.assertLess(loss, .015)

    # Finally, advance the model a single step and validate that the loss is
    # still low.
    with ops.Graph().as_default():
      random_seed.set_random_seed(2)
      train_op = self.create_train_op()

      model_variables = variables_lib.global_variables()
      model_path = os.path.join(logdir1, 'model.ckpt-300')
      saver = saver_lib.Saver(model_variables)

      def RestoreFn(sess):
        saver.restore(sess, model_path)

      loss = learning.train(
          train_op, logdir2, number_of_steps=1, init_fn=RestoreFn)

      self.assertIsNotNone(loss)
      self.assertLess(loss, .015) 
Example #17
Source File: export.py    From keras-lambda with MIT License
def _get_saver():
  """Lazy init and return saver."""
  saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
  if saver is not None:
    if saver:
      saver = saver[0]
    else:
      saver = None
  if saver is None and variables.global_variables():
    saver = tf_saver.Saver()
    ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
  return saver 
Example #18
Source File: graph_actions.py    From keras-lambda with MIT License
def _get_saver():
  """Lazy init and return saver."""
  saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
  if saver is None and variables.global_variables():
    saver = tf_saver.Saver()
    ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
  return saver 
Example #19
Source File: specs_test.py    From keras-lambda with MIT License
def DISABLED_testShared(self):
    with self.test_session():
      with specs.ops:
        # pylint: disable=undefined-variable
        f = Shared(Fr(100))
        g = f | f | f | f
      inputs = constant_op.constant(_rand(10, 100))
      _ = g.funcall(inputs)
      self.assertEqual(len(variables.global_variables()), 2) 
Example #20
Source File: graph_actions.py    From auto-alt-text-lambda-api with MIT License
def _get_saver():
  """Lazy init and return saver."""
  saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
  if saver is None and variables.global_variables():
    saver = tf_saver.Saver()
    ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
  return saver 
Example #21
Source File: export.py    From auto-alt-text-lambda-api with MIT License
def _get_saver():
  """Lazy init and return saver."""
  saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
  if saver is not None:
    if saver:
      saver = saver[0]
    else:
      saver = None
  if saver is None and variables.global_variables():
    saver = tf_saver.Saver()
    ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
  return saver 
Example #22
Source File: graph_actions.py    From lambda-packs with MIT License
def _get_saver():
  """Lazy init and return saver."""
  saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
  if saver is None and variables.global_variables():
    saver = tf_saver.Saver()
    ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
  return saver 
Example #23
Source File: learning_test.py    From auto-alt-text-lambda-api with MIT License
def testTrainWithInitFromFn(self):
    logdir1 = os.path.join(
        tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs1')
    logdir2 = os.path.join(
        tempfile.mkdtemp(prefix=self.get_temp_dir()), 'tmp_logs2')

    # First, train the model one step (make sure the error is high).
    with ops.Graph().as_default():
      random_seed.set_random_seed(0)
      train_op = self.create_train_op()
      loss = learning.train(train_op, logdir1, number_of_steps=1)
      self.assertGreater(loss, .5)

    # Next, train the model to convergence.
    with ops.Graph().as_default():
      random_seed.set_random_seed(1)
      train_op = self.create_train_op()
      loss = learning.train(
          train_op, logdir1, number_of_steps=300, log_every_n_steps=10)
      self.assertIsNotNone(loss)
      self.assertLess(loss, .015)

    # Finally, advance the model a single step and validate that the loss is
    # still low.
    with ops.Graph().as_default():
      random_seed.set_random_seed(2)
      train_op = self.create_train_op()

      model_variables = variables_lib.global_variables()
      model_path = os.path.join(logdir1, 'model.ckpt-300')
      saver = saver_lib.Saver(model_variables)

      def RestoreFn(sess):
        saver.restore(sess, model_path)

      loss = learning.train(
          train_op, logdir2, number_of_steps=1, init_fn=RestoreFn)

      self.assertIsNotNone(loss)
      self.assertLess(loss, .015) 
Example #24
Source File: export.py    From lambda-packs with MIT License
def _get_saver():
  """Lazy init and return saver."""
  saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
  if saver is not None:
    if saver:
      saver = saver[0]
    else:
      saver = None
  if saver is None and variables.global_variables():
    saver = tf_saver.Saver()
    ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
  return saver 
Example #25
Source File: supervisor.py    From auto-alt-text-lambda-api with MIT License
def _init_saver(self, saver=USE_DEFAULT):
    """Initializes saver.

    Args:
      saver: A `Saver` object. If set to USE_DEFAULT, create one that
        saves all the variables.
    """
    if saver is Supervisor.USE_DEFAULT:
      saver = self._get_first_op_from_collection(ops.GraphKeys.SAVERS)
      if saver is None and variables.global_variables():
        saver = saver_mod.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    self._saver = saver 
Example #26
Source File: supervisor.py    From lambda-packs with MIT License
def _init_saver(self, saver=USE_DEFAULT):
    """Initializes saver.

    Args:
      saver: A `Saver` object. If set to USE_DEFAULT, create one that
        saves all the variables.
    """
    if saver is Supervisor.USE_DEFAULT:
      saver = self._get_first_op_from_collection(ops.GraphKeys.SAVERS)
      if saver is None and variables.global_variables():
        saver = saver_mod.Saver()
        ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
    self._saver = saver 
Example #27
Source File: specs_test.py    From auto-alt-text-lambda-api with MIT License
def DISABLED_testShared(self):
    with self.test_session():
      with specs.ops:
        # pylint: disable=undefined-variable
        f = Shared(Fr(100))
        g = f | f | f | f
      inputs = constant_op.constant(_rand(10, 100))
      _ = g.funcall(inputs)
      self.assertEqual(len(variables.global_variables()), 2) 
Example #28
Source File: backend.py    From lambda-packs with MIT License
def _initialize_variables():
  """Utility to initialize uninitialized variables on the fly.
  """
  variables = variables_module.global_variables()
  uninitialized_variables = []
  for v in variables:
    if not hasattr(v, '_keras_initialized') or not v._keras_initialized:
      uninitialized_variables.append(v)
      v._keras_initialized = True
  if uninitialized_variables:
    sess = get_session()
    sess.run(variables_module.variables_initializer(uninitialized_variables)) 
Example #29
Source File: base.py    From lambda-packs with MIT License
def add_variable(self, name, shape, dtype=None,
                   initializer=None, regularizer=None, trainable=True):
    """Adds a new variable to the layer, or gets an existing one; returns it.

    Arguments:
      name: variable name.
      shape: variable shape.
      dtype: The type of the variable. Defaults to `self.dtype`.
      initializer: initializer instance (callable).
      regularizer: regularizer instance (callable).
      trainable: whether the variable should be part of the layer's
        "trainable_variables" (e.g. variables, biases)
        or "non_trainable_variables" (e.g. BatchNorm mean, stddev).

    Returns:
      The created variable.
    """
    if dtype is None:
      dtype = self.dtype
    existing_variables = set(tf_variables.global_variables())

    self._set_scope(None)

    with vs.variable_scope(self._scope,
                           reuse=self.built or self._reuse) as scope:
      with ops.name_scope(scope.original_name_scope):
        variable = vs.get_variable(name,
                                   shape=shape,
                                   initializer=initializer,
                                   dtype=dtypes.as_dtype(dtype),
                                   trainable=trainable and self.trainable)
        if variable in existing_variables:
          return variable
        if regularizer:
          # To match the behavior of tf.get_variable(), we only
          # apply regularization if the variable is newly created.
          if isinstance(variable, tf_variables.PartitionedVariable):
            for v in variable:
              with ops.colocate_with(v.op):
                with ops.name_scope(name + '/Regularizer'):
                  regularization = regularizer(v)
              if regularization is not None:
                self.add_loss(regularization)
                _add_elements_to_collection(
                    regularization, ops.GraphKeys.REGULARIZATION_LOSSES)
          else:
            with ops.colocate_with(variable.op):
              with ops.name_scope(name + '/Regularizer'):
                regularization = regularizer(variable)
            if regularization is not None:
              self.add_loss(regularization)
              _add_elements_to_collection(
                  regularization, ops.GraphKeys.REGULARIZATION_LOSSES)
    if trainable:
      self._trainable_weights.append(variable)
    else:
      self._non_trainable_weights.append(variable)
    return variable 
Example #30
Source File: moving_averages.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def variables_to_restore(self, moving_avg_variables=None):
    """Returns a map of names to `Variables` to restore.

    If a variable has a moving average, use the moving average variable name as
    the restore name; otherwise, use the variable name.

    For example,

    ```python
      variables_to_restore = ema.variables_to_restore()
      saver = tf.train.Saver(variables_to_restore)
    ```

    Below is an example of such mapping:

    ```
      conv/batchnorm/gamma/ExponentialMovingAverage: conv/batchnorm/gamma,
      conv_4/conv2d_params/ExponentialMovingAverage: conv_4/conv2d_params,
      global_step: global_step
    ```
    Args:
      moving_avg_variables: a list of variables that require the moving
        average variable name to be used when restoring. If None, defaults to
        variables.moving_average_variables() + variables.trainable_variables().

    Returns:
      A map from restore_names to variables. The restore_name can be the
      moving_average version of the variable name if it exists, or the original
      variable name.
    """
    name_map = {}
    if moving_avg_variables is None:
      # Include trainable variables and variables which have been explicitly
      # added to the moving_average_variables collection.
      moving_avg_variables = variables.trainable_variables()
      moving_avg_variables += variables.moving_average_variables()
    # Remove duplicates
    moving_avg_variables = set(moving_avg_variables)
    # Collect all the variables with moving average,
    for v in moving_avg_variables:
      name_map[self.average_name(v)] = v
    # Make sure we restore variables without moving average as well.
    for v in list(set(variables.global_variables()) - moving_avg_variables):
      if v.op.name not in name_map:
        name_map[v.op.name] = v
    return name_map