Python tensorflow.python.training.training_util.get_global_step() Examples
The following are 30
code examples of tensorflow.python.training.training_util.get_global_step().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
tensorflow.python.training.training_util, or try the search function.
Example #1
Source File: variables.py From tensornets with MIT License | 6 votes |
def assert_or_get_global_step(graph=None, global_step_tensor=None):
  """Verifies that a global step tensor is valid or gets one if None is given.

  If `global_step_tensor` is not None, check that it is a valid global step
  tensor (using `assert_global_step`). Otherwise find a global step tensor
  using `get_global_step` and return it.

  Args:
    graph: The graph to find the global step tensor for.
    global_step_tensor: The tensor to check for suitability as a global step.
      If None is given (the default), find a global step tensor.

  Returns:
    A tensor suitable as a global step, or `None` if none was provided and
    none was found.
  """
  if global_step_tensor is not None:
    # A candidate was supplied: validate it and hand it back unchanged.
    assert_global_step(global_step_tensor)
    return global_step_tensor
  # Nothing supplied: look one up the same way the supervisor would.
  return get_global_step(graph)
Example #2
Source File: variables.py From auto-alt-text-lambda-api with MIT License | 6 votes |
def assert_or_get_global_step(graph=None, global_step_tensor=None):
  """Verifies that a global step tensor is valid or gets one if None is given.

  If `global_step_tensor` is not None, check that it is a valid global step
  tensor (using `assert_global_step`). Otherwise find a global step tensor
  using `get_global_step` and return it.

  Args:
    graph: The graph to find the global step tensor for.
    global_step_tensor: The tensor to check for suitability as a global step.
      If None is given (the default), find a global step tensor.

  Returns:
    A tensor suitable as a global step, or `None` if none was provided and
    none was found.
  """
  if global_step_tensor is not None:
    # Validate the caller-supplied tensor before returning it.
    assert_global_step(global_step_tensor)
    return global_step_tensor
  # No tensor given: discover one in the graph (may still be None).
  return get_global_step(graph)
Example #3
Source File: variables.py From tf-slim with Apache License 2.0 | 6 votes |
def assert_or_get_global_step(graph=None, global_step_tensor=None):
  """Verifies that a global step tensor is valid or gets one if None is given.

  If `global_step_tensor` is not None, check that it is a valid global step
  tensor (using `assert_global_step`). Otherwise find a global step tensor
  using `get_global_step` and return it.

  Args:
    graph: The graph to find the global step tensor for.
    global_step_tensor: The tensor to check for suitability as a global step.
      If None is given (the default), find a global step tensor.

  Returns:
    A tensor suitable as a global step, or `None` if none was provided and
    none was found.
  """
  if global_step_tensor is None:
    # Look the tensor up the same way the supervisor would.
    return get_global_step(graph)
  assert_global_step(global_step_tensor)
  return global_step_tensor
Example #4
Source File: early_stopping.py From estimator with Apache License 2.0 | 6 votes |
def begin(self):
  """Builds the distributed stop op before the session is created."""
  self._global_step_tensor = training_util.get_global_step()
  self._stop_var = self._get_or_create_stop_var_with_aggregation()
  # This hook only makes sense in cross-replica context.
  assert distribution_strategy_context.in_cross_replica_context()
  dist_strategy = distribution_strategy_context.get_strategy()
  self._stop_placeholder = None

  def _assign_stop(var):
    # Each replica gets its own placeholder; only the first one created is
    # kept on the hook so callers have a single feedable handle.
    placeholder = array_ops.placeholder_with_default(
        0, tuple(), name='stop_value')
    if self._stop_placeholder is None:
      self._stop_placeholder = placeholder
    return var.assign_add(placeholder)

  self._stop_op = dist_strategy.run(_assign_stop, args=(self._stop_var,))
Example #5
Source File: variables.py From lambda-packs with MIT License | 6 votes |
def assert_or_get_global_step(graph=None, global_step_tensor=None):
  """Verifies that a global step tensor is valid or gets one if None is given.

  If `global_step_tensor` is not None, check that it is a valid global step
  tensor (using `assert_global_step`). Otherwise find a global step tensor
  using `get_global_step` and return it.

  Args:
    graph: The graph to find the global step tensor for.
    global_step_tensor: The tensor to check for suitability as a global step.
      If None is given (the default), find a global step tensor.

  Returns:
    A tensor suitable as a global step, or `None` if none was provided and
    none was found.
  """
  if global_step_tensor is not None:
    # Check the provided tensor is usable as a global step.
    assert_global_step(global_step_tensor)
    return global_step_tensor
  # Fall back to graph discovery, as the supervisor does.
  return get_global_step(graph)
Example #6
Source File: variables.py From keras-lambda with MIT License | 6 votes |
def assert_or_get_global_step(graph=None, global_step_tensor=None):
  """Verifies that a global step tensor is valid or gets one if None is given.

  If `global_step_tensor` is not None, check that it is a valid global step
  tensor (using `assert_global_step`). Otherwise find a global step tensor
  using `get_global_step` and return it.

  Args:
    graph: The graph to find the global step tensor for.
    global_step_tensor: The tensor to check for suitability as a global step.
      If None is given (the default), find a global step tensor.

  Returns:
    A tensor suitable as a global step, or `None` if none was provided and
    none was found.
  """
  if global_step_tensor is None:
    # Nothing provided: locate one in the graph (result may be None).
    global_step_tensor = get_global_step(graph)
  else:
    # Validate the tensor supplied by the caller.
    assert_global_step(global_step_tensor)
  return global_step_tensor
Example #7
Source File: variables.py From deep_image_model with Apache License 2.0 | 6 votes |
def create_global_step(graph=None):
  """Create global step tensor in graph.

  Args:
    graph: The graph in which to create the global step. If missing, use
      default graph.

  Returns:
    Global step tensor.

  Raises:
    ValueError: if global step key is already defined.
  """
  target_graph = graph if graph is not None else ops.get_default_graph()
  if get_global_step(target_graph) is not None:
    raise ValueError('"global_step" already exists.')
  step_collections = [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP]
  # Create in proper graph and base name_scope.
  with target_graph.as_default() as g, g.name_scope(None):
    return variable(
        ops.GraphKeys.GLOBAL_STEP,
        shape=[],
        dtype=dtypes.int64,
        # NOTE(review): zeros_initializer is passed uncalled here, unlike the
        # other variants in this file — presumably an older TF API; confirm.
        initializer=init_ops.zeros_initializer,
        trainable=False,
        collections=step_collections)
Example #8
Source File: tpu_estimator.py From embedding-as-service with MIT License | 5 votes |
def _create_global_step(graph):
  """Creates a resource-variable global step in `graph` (default if None).

  Raises:
    ValueError: if a global step already exists in the graph.
  """
  target = graph or ops.get_default_graph()
  if training.get_global_step(target) is not None:
    raise ValueError('"global_step" already exists.')
  step_collections = [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP]
  # Create in proper graph and base name_scope.
  with target.as_default() as g, g.name_scope(None):
    return variable_scope.get_variable(
        ops.GraphKeys.GLOBAL_STEP,
        shape=[],
        dtype=dtypes.int64,
        initializer=init_ops.zeros_initializer(),
        trainable=False,
        use_resource=True,
        collections=step_collections)
Example #9
Source File: basic_session_run_hooks.py From keras-lambda with MIT License | 5 votes |
def begin(self):
  """Sets up the writer, global step tensor, and listeners for saving."""
  self._summary_writer = SummaryWriterCache.get(self._checkpoint_dir)
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        "Global step should be created to use CheckpointSaverHook.")
  # Give every listener a chance to initialize before the session starts.
  for listener in self._listeners:
    listener.begin()
Example #10
Source File: basic_session_run_hooks.py From keras-lambda with MIT License | 5 votes |
def end(self, session):
  """Saves a final checkpoint if the last step was not already saved."""
  final_step = session.run(training_util.get_global_step())
  # Avoid a duplicate save when the timer already fired on this step.
  if final_step != self._timer.last_triggered_step():
    self._save(final_step, session)
  for listener in self._listeners:
    listener.end(session, final_step)
Example #11
Source File: basic_session_run_hooks.py From keras-lambda with MIT License | 5 votes |
def begin(self):
  """Resolves the summary writer and global step tensor for step counting."""
  if self._summary_writer is None and self._output_dir:
    self._summary_writer = SummaryWriterCache.get(self._output_dir)
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        "Global step should be created to use StepCounterHook.")
  # Summary tag mirrors the step tensor's op name, e.g. "global_step/sec".
  self._summary_tag = self._global_step_tensor.op.name + "/sec"
Example #12
Source File: basic_session_run_hooks.py From keras-lambda with MIT License | 5 votes |
def begin(self):
  """Prepares the writer and global step tensor before the session starts."""
  if self._summary_writer is None and self._output_dir:
    self._summary_writer = SummaryWriterCache.get(self._output_dir)
  self._next_step = None
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        "Global step should be created to use SummarySaverHook.")
Example #13
Source File: variables.py From keras-lambda with MIT License | 5 votes |
def get_global_step(graph=None):
  """Thin wrapper that delegates to `training_util.get_global_step`.

  Args:
    graph: The graph to search; defaults to None (training_util decides).

  Returns:
    Whatever `training_util.get_global_step(graph)` returns.
  """
  return training_util.get_global_step(graph)
Example #14
Source File: variables.py From keras-lambda with MIT License | 5 votes |
def create_global_step(graph=None):
  """Create global step tensor in graph.

  Args:
    graph: The graph in which to create the global step. If missing, use
      default graph.

  Returns:
    Global step tensor.

  Raises:
    ValueError: if global step key is already defined.
  """
  target_graph = ops.get_default_graph() if graph is None else graph
  if get_global_step(target_graph) is not None:
    raise ValueError('"global_step" already exists.')
  # Create in proper graph and base name_scope.
  with target_graph.as_default() as g, g.name_scope(None):
    step_collections = [
        ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP
    ]
    return variable(
        ops.GraphKeys.GLOBAL_STEP,
        shape=[],
        dtype=dtypes.int64,
        initializer=init_ops.zeros_initializer(),
        trainable=False,
        collections=step_collections)
Example #15
Source File: tpu_estimator.py From xlnet with Apache License 2.0 | 5 votes |
def _create_global_step(graph):
  """Creates a resource-variable global step in `graph` (default if None).

  Raises:
    ValueError: if a global step already exists in the graph.
  """
  target = graph or ops.get_default_graph()
  if training.get_global_step(target) is not None:
    raise ValueError('"global_step" already exists.')
  # Create in proper graph and base name_scope.
  with target.as_default() as g, g.name_scope(None):
    return variable_scope.get_variable(
        ops.GraphKeys.GLOBAL_STEP,
        shape=[],
        dtype=dtypes.int64,
        initializer=init_ops.zeros_initializer(),
        trainable=False,
        use_resource=True,
        collections=[ops.GraphKeys.GLOBAL_VARIABLES,
                     ops.GraphKeys.GLOBAL_STEP])
Example #16
Source File: variables.py From keras-lambda with MIT License | 5 votes |
def get_or_create_global_step(graph=None):
  """Returns and create (if necessary) the global step variable.

  Args:
    graph: The graph in which to create the global step. If missing, use
      default graph.

  Returns:
    the tensor representing the global step variable.
  """
  target_graph = ops.get_default_graph() if graph is None else graph
  existing = get_global_step(target_graph)
  if existing is not None:
    return existing
  # No step in this graph yet: create one.
  return create_global_step(target_graph)
Example #17
Source File: tpu_estimator.py From xlnet with Apache License 2.0 | 5 votes |
def begin(self):
  """Caches the global step tensor and the iterations-per-loop variable."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError('Global step should be created.')
  self._iterations_per_loop_var = _create_or_get_iterations_per_loop()
Example #18
Source File: basic_session_run_hooks.py From keras-lambda with MIT License | 5 votes |
def begin(self):
  """Resolves the global step tensor; fails if none exists in the graph."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError("Global step should be created to use StopAtStepHook.")
Example #19
Source File: cifar10_utils.py From object_detection_kitti with Apache License 2.0 | 5 votes |
def begin(self):
  """Looks up the global step tensor needed for step-rate counting."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        'Global step should be created to use StepCounterHook.')
Example #20
Source File: cifar10_utils.py From Live-feed-object-device-identification-using-Tensorflow-and-OpenCV with Apache License 2.0 | 5 votes |
def begin(self):
  """Caches the graph's global step tensor, raising if it is missing."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        'Global step should be created to use StepCounterHook.')
Example #21
Source File: cifar10_utils.py From nasbot with MIT License | 5 votes |
def begin(self):
  """Fetches the global step tensor; StepCounterHook cannot run without it."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        'Global step should be created to use StepCounterHook.')
Example #22
Source File: utils.py From DistributedDeepLearning with MIT License | 5 votes |
def begin(self):
  """Resolves the global step tensor required by StepCounterHook."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError("Global step should be created to use StepCounterHook.")
Example #23
Source File: hooks.py From NJUNMT-tf with Apache License 2.0 | 5 votes |
def __init__(self,
             checkpoint_dir,
             display_steps=100,
             maximum_train_steps=None,
             do_summary=True,
             is_chief=True):
  """ Initializes the hook.

  Args:
    checkpoint_dir: A string, base directory for the checkpoint files.
    display_steps: A python integer, display every N steps.
    maximum_train_steps: A python integer, the maximum training steps.
    do_summary: Whether to save summaries when display.
    is_chief: Whether this is the chief process.
  """
  tf.logging.info("Create DisplayHook.")
  self._checkpoint_dir = checkpoint_dir
  # display steps
  self._display_steps = display_steps
  self._maximum_train_steps = maximum_train_steps
  self._do_summary = do_summary
  self._is_chief = is_chief  # not used now
  # Pair each registered display key with its value tensor, plus the step.
  step_tensor = training_util.get_global_step()
  keys = ops.get_collection(Constants.DISPLAY_KEY_COLLECTION_NAME)
  values = ops.get_collection(Constants.DISPLAY_VALUE_COLLECTION_NAME)
  self._display_args = dict(zip(keys, values))
  self._display_args["global_step"] = step_tensor
  # timer & summary writer
  self._timer = None
  self._logging_timer = None
  self._summary_writer = None
Example #24
Source File: hooks.py From NJUNMT-tf with Apache License 2.0 | 5 votes |
def __init__(self,
             checkpoint_dir,
             save_checkpoint_steps=1000,
             saver=None,
             pretrain_model=None,
             problem_name=None,
             model_name="njunmt.models.SequenceToSequence",
             do_summary=True,
             is_chief=True):
  """ Initializes the hook.

  Args:
    checkpoint_dir: A string, base directory for the checkpoint files.
    save_checkpoint_steps: A python integer, save every N steps.
    saver: `Saver` object, used for saving.
    pretrain_model: The pretrained model dir.
    problem_name: A string.
    model_name: The model name.
    do_summary: Whether to save summaries.
    is_chief: Whether this is the chief process.
  """
  tf.logging.info("Create CheckpointSaverHook.")
  if saver is None:
    saver = get_saver_or_default(max_to_keep=8)  # pylint: disable=protected-access
  self._saver = saver
  self._checkpoint_dir = checkpoint_dir
  self._save_path = os.path.join(checkpoint_dir, Constants.MODEL_CKPT_FILENAME)
  self._pretrain_model = pretrain_model
  self._problem_name = problem_name
  self._model_name = model_name
  # save every n steps
  self._save_checkpoint_steps = save_checkpoint_steps
  # variable for session.run
  self._global_step = training_util.get_global_step()
  # for after create session
  self._do_summary = do_summary
  self._is_chief = is_chief
  # timer & summary writer
  self._timer = None
  self._summary_writer = None
Example #25
Source File: imagenet_utils.py From uai-sdk with Apache License 2.0 | 5 votes |
def begin(self):
  """Looks up the global step tensor used to compute steps/sec."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        'Global step should be created to use StepCounterHook.')
Example #26
Source File: cifar10_utils.py From uai-sdk with Apache License 2.0 | 5 votes |
def begin(self):
  """Resolves and caches the global step tensor for this hook."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        'Global step should be created to use StepCounterHook.')
Example #27
Source File: imagenet_utils.py From uai-sdk with Apache License 2.0 | 5 votes |
def begin(self):
  """Fetches the global step tensor; raises if the graph has none."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError(
        'Global step should be created to use StepCounterHook.')
Example #28
Source File: tpu_estimator.py From transformer-xl with Apache License 2.0 | 5 votes |
def begin(self):
  """Caches the global step tensor and the iterations-per-loop variable."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError('Global step should be created.')
  self._iterations_per_loop_var = _create_or_get_iterations_per_loop()
Example #29
Source File: tpu_estimator.py From transformer-xl with Apache License 2.0 | 5 votes |
def _create_global_step(graph):
  """Creates a resource-variable global step in `graph` (default if None).

  Raises:
    ValueError: if a global step already exists in the graph.
  """
  target = graph or ops.get_default_graph()
  if training.get_global_step(target) is not None:
    raise ValueError('"global_step" already exists.')
  step_collections = [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP]
  # Create in proper graph and base name_scope.
  with target.as_default() as g, g.name_scope(None):
    return variable_scope.get_variable(
        ops.GraphKeys.GLOBAL_STEP,
        shape=[],
        dtype=dtypes.int64,
        initializer=init_ops.zeros_initializer(),
        trainable=False,
        use_resource=True,
        collections=step_collections)
Example #30
Source File: tpu_estimator.py From embedding-as-service with MIT License | 5 votes |
def begin(self):
  """Resolves the global step tensor and the iterations-per-loop variable."""
  step_tensor = training_util.get_global_step()
  self._global_step_tensor = step_tensor
  if step_tensor is None:
    raise RuntimeError('Global step should be created.')
  self._iterations_per_loop_var = _create_or_get_iterations_per_loop()