Python tensorflow.python.platform.tf_logging.warning() Examples

The following are 30 code examples of tensorflow.python.platform.tf_logging.warning(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow.python.platform.tf_logging, or try the search function.
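Before the examples, here is a minimal, self-contained sketch of the call pattern most of them share. Like the standard library's logging module, tf_logging.warning() takes a printf-style format string followed by its arguments, so interpolation is deferred until the record is actually emitted; the checkpoint path and step count below are made-up values for illustration.

from tensorflow.python.platform import tf_logging as logging

# Pass the %-style format string and its arguments separately; the string
# is only built if the WARNING record is actually emitted.
logging.warning('checkpoint %s is older than %d steps', '/tmp/model.ckpt', 1000)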
Example #1
Source File: variable_clipping_optimizer.py    From auto-alt-text-lambda-api with MIT License
def _clip_sparse(self, grad, var):
    assert isinstance(grad, ops.IndexedSlices)
    clip_dims = self._vars_to_clip_dims[var]
    if 0 in clip_dims:
      logging.warning("Clipping norm across dims %s for %s is inefficient "
                      "when including sparse dimension 0.", clip_dims,
                      var.op.name)
      return self._clip_dense(var)

    with ops.colocate_with(var):
      var_subset = array_ops.gather(var, grad.indices)
    with self._maybe_colocate_with(var):
      normalized_var_subset = clip_ops.clip_by_norm(
          var_subset, self._max_norm, clip_dims)
      delta = ops.IndexedSlices(
          var_subset - normalized_var_subset, grad.indices, grad.dense_shape)
    with ops.colocate_with(var):
      return var.scatter_sub(delta, use_locking=self._use_locking) 
Example #2
Source File: estimator.py    From lambda-packs with MIT License
def _extract_metric_update_ops(self, eval_dict):
    """Separate update operations from metric value operations."""
    update_ops = []
    value_ops = {}
    for name, metric_ops in six.iteritems(eval_dict):
      if isinstance(metric_ops, (list, tuple)):
        if len(metric_ops) == 2:
          value_ops[name] = metric_ops[0]
          update_ops.append(metric_ops[1])
        else:
          logging.warning(
              'Ignoring metric {}. It returned a list|tuple with len {}, '
              'expected 2'.format(name, len(metric_ops)))
          value_ops[name] = metric_ops
      else:
        value_ops[name] = metric_ops

    if update_ops:
      update_ops = control_flow_ops.group(*update_ops)
    else:
      update_ops = None

    return update_ops, value_ops 
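Example #2 builds its message eagerly with str.format(), whereas most snippets on this page (Example #1, for instance) pass the arguments through to warning() for lazy interpolation. A minimal sketch of the difference, with name and n as hypothetical stand-ins for Example #2's variables:

from tensorflow.python.platform import tf_logging as logging

name, n = 'accuracy', 3  # hypothetical metric name and op count

# Eager: the message string is built even if WARNING is filtered out.
logging.warning('Ignoring metric {}. It returned {} ops, expected 2'.format(name, n))

# Deferred: interpolation happens only when the record is emitted.
logging.warning('Ignoring metric %s. It returned %d ops, expected 2', name, n)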
Example #3
Source File: ops.py    From lambda-packs with MIT License
def prepend_name_scope(name, import_scope):
  """Prepends name scope to a name.

  Args:
    name: A `string` name.
    import_scope: Optional `string`. Name scope to add.

  Returns:
    Name with name scope added, or the original name if import_scope
    is None.
  """
  if import_scope:
    try:
      str_to_replace = r"([\^]|loc:@|^)(.*)"
      return re.sub(str_to_replace, r"\1" + import_scope + r"/\2",
                    compat.as_str(name))
    except TypeError as e:
      # If the name is not of a type we can process, simply return it.
      logging.warning(e)
      return name
  else:
    return name


# pylint: disable=g-doc-return-or-yield 
Example #4
Source File: ops.py    From lambda-packs with MIT License
def strip_name_scope(name, export_scope):
  """Removes name scope from a name.

  Args:
    name: A `string` name.
    export_scope: Optional `string`. Name scope to remove.

  Returns:
    Name with name scope removed, or the original name if export_scope
    is None.
  """
  if export_scope:
    try:
      # Strips export_scope/, export_scope///,
      # ^export_scope/, loc:@export_scope/.
      str_to_replace = r"([\^]|loc:@|^)" + export_scope + r"[\/]+(.*)"
      return re.sub(str_to_replace, r"\1\2", compat.as_str(name), count=1)
    except TypeError as e:
      # If the name is not of a type we can process, simply return it.
      logging.warning(e)
      return name
  else:
    return name 
Example #5
Source File: tpu_estimator.py    From Chinese-XLNet with Apache License 2.0
def _validate_input_pipeline(self):
    """Validates the input pipeline.

    Performs some sanity checks and logs user-friendly information. Ideally we
    would error out to give users a better error message, but when
    _WRAP_INPUT_FN_INTO_WHILE_LOOP is False (legacy behavior) we cannot break
    user code, so we only log a warning.

    Raises:
      RuntimeError: If the validation failed.
    """
    if ops.get_default_graph().get_collection(ops.GraphKeys.QUEUE_RUNNERS):
      err_msg = ('Input pipeline contains one or more QueueRunners. '
                 'It could be slow and not scalable. Please consider '
                 'converting your input pipeline to use `tf.data` instead (see '
                 'https://www.tensorflow.org/guide/datasets for '
                 'instructions).')
      if _WRAP_INPUT_FN_INTO_WHILE_LOOP:
        raise RuntimeError(err_msg)
      else:
        logging.warning(err_msg)
Example #6
Source File: op_def_library.py    From lambda-packs with MIT License
def _MakeShape(v, arg_name):
  """Convert v into a TensorShapeProto."""
  # Args:
  #   v: A TensorShapeProto, a list of ints, or a tensor_shape.TensorShape.
  #   arg_name: String, for error messages.

  # Returns:
  #   A TensorShapeProto.
  if isinstance(v, tensor_shape_pb2.TensorShapeProto):
    for d in v.dim:
      if d.name:
        logging.warning("Warning: TensorShapeProto with a named dimension: %s",
                        str(v))
        break
    return v
  return tensor_shape.as_shape(v).as_proto() 
Example #7
Source File: ffmpeg_ops.py    From auto-alt-text-lambda-api with MIT License
def _load_library(name, op_list=None):
  """Loads a .so file containing the specified operators.

  Args:
    name: The name of the .so file to load.
    op_list: A list of names of operators that the library should have. If None
        then the .so file's contents will not be verified.

  Raises:
    NameError if one of the required ops is missing.
  """
  try:
    filename = resource_loader.get_path_to_datafile(name)
    library = load_library.load_op_library(filename)
    for expected_op in (op_list or []):
      for lib_op in library.OP_LIST.op:
        if lib_op.name == expected_op:
          break
      else:
        raise NameError('Could not find operator %s in dynamic library %s' %
                        (expected_op, name))
  except errors.NotFoundError:
    logging.warning('%s file could not be loaded.', name) 
Example #8
Source File: tpu_estimator.py    From Chinese-XLNet with Apache License 2.0
def create_cpu_hostcall(host_calls):
    """Runs on the host_call on CPU instead of TPU when use_tpu=False."""

    _OutfeedHostCall.validate(host_calls)
    ret = {}
    for name, host_call in host_calls.items():
      host_fn, tensors = host_call
      if isinstance(tensors, (tuple, list)):
        ret[name] = host_fn(*tensors)
      else:
        # Must be dict.
        try:
          ret[name] = host_fn(**tensors)
        except TypeError as e:
          logging.warning(
              'Exception while calling %s: %s. It is likely the tensors '
              '(%s[1]) do not match the '
              'function\'s arguments', name, e, name)
          raise
    return ret 
Example #9
Source File: handler.py    From auto-alt-text-lambda-api with MIT License
def _serve_runs(self, unused_query_params):
    """Return a JSON object about runs and tags.

    Returns a mapping from runs to tagType to list of tags for that run.

    Returns:
      {runName: {images: [tag1, tag2, tag3],
                 audio: [tag4, tag5, tag6],
                 scalars: [tagA, tagB, tagC],
                 histograms: [tagX, tagY, tagZ],
                 firstEventTimestamp: 123456.789}}
    """
    runs = self._multiplexer.Runs()
    for run_name, run_data in runs.items():
      try:
        run_data['firstEventTimestamp'] = self._multiplexer.FirstEventTimestamp(
            run_name)
      except ValueError:
        logging.warning('Unable to get first event timestamp for run %s',
                        run_name)
        run_data['firstEventTimestamp'] = None
    self.respond(runs, 'application/json') 
Example #10
Source File: variable_clipping_optimizer.py    From lambda-packs with MIT License
def _clip_sparse(self, grad, var):
    assert isinstance(grad, ops.IndexedSlices)
    clip_dims = self._vars_to_clip_dims[var]
    if 0 in clip_dims:
      logging.warning("Clipping norm across dims %s for %s is inefficient "
                      "when including sparse dimension 0.", clip_dims,
                      var.op.name)
      return self._clip_dense(var)

    with ops.colocate_with(var):
      var_subset = array_ops.gather(var, grad.indices)
    with self._maybe_colocate_with(var):
      normalized_var_subset = clip_ops.clip_by_norm(
          var_subset, self._max_norm, clip_dims)
      delta = ops.IndexedSlices(
          var_subset - normalized_var_subset, grad.indices, grad.dense_shape)
    with ops.colocate_with(var):
      return var.scatter_sub(delta, use_locking=self._use_locking) 
Example #11
Source File: op_def_library.py    From deep_image_model with Apache License 2.0
def _MakeShape(v, arg_name):
  """Convert v into a TensorShapeProto."""
  # Args:
  #   v: A TensorShapeProto, a list of ints, or a tensor_shape.TensorShape.
  #   arg_name: String, for error messages.

  # Returns:
  #   A TensorShapeProto.
  if isinstance(v, tensor_shape_pb2.TensorShapeProto):
    for d in v.dim:
      if d.name:
        logging.warning("Warning: TensorShapeProto with a named dimension: %s",
                        str(v))
        break
    return v
  return tensor_shape.as_shape(v).as_proto() 
Example #12
Source File: op_def_library.py    From auto-alt-text-lambda-api with MIT License
def _MakeShape(v, arg_name):
  """Convert v into a TensorShapeProto."""
  # Args:
  #   v: A TensorShapeProto, a list of ints, or a tensor_shape.TensorShape.
  #   arg_name: String, for error messages.

  # Returns:
  #   A TensorShapeProto.
  if isinstance(v, tensor_shape_pb2.TensorShapeProto):
    for d in v.dim:
      if d.name:
        logging.warning("Warning: TensorShapeProto with a named dimension: %s",
                        str(v))
        break
    return v
  return tensor_shape.as_shape(v).as_proto() 
Example #13
Source File: estimator.py    From lambda-packs with MIT License
def _verify_model_fn_args(model_fn, params):
  """Verifies model fn arguments."""
  args = set(_model_fn_args(model_fn))
  if 'features' not in args:
    raise ValueError('model_fn (%s) must include features argument.' % model_fn)
  if 'labels' not in args:
    raise ValueError('model_fn (%s) must include labels argument.' % model_fn)
  if params is not None and 'params' not in args:
    raise ValueError('model_fn (%s) does not include params argument, '
                     'but params (%s) is passed to Estimator.' % (model_fn,
                                                                  params))
  if params is None and 'params' in args:
    logging.warning('Estimator\'s model_fn (%s) includes params '
                    'argument, but params are not passed to Estimator.',
                    model_fn)
  if tf_inspect.ismethod(model_fn):
    if 'self' in args:
      args.remove('self')
  non_valid_args = list(args - _VALID_MODEL_FN_ARGS)
  if non_valid_args:
    raise ValueError('model_fn (%s) has the following unexpected args: %s' %
                     (model_fn, non_valid_args)) 
Example #14
Source File: estimator.py    From auto-alt-text-lambda-api with MIT License
def _extract_metric_update_ops(self, eval_dict):
    """Separate update operations from metric value operations."""
    update_ops = []
    value_ops = {}
    for name, metric_ops in six.iteritems(eval_dict):
      if isinstance(metric_ops, (list, tuple)):
        if len(metric_ops) == 2:
          value_ops[name] = metric_ops[0]
          update_ops.append(metric_ops[1])
        else:
          logging.warning(
              'Ignoring metric {}. It returned a list|tuple with len {}, '
              'expected 2'.format(name, len(metric_ops)))
          value_ops[name] = metric_ops
      else:
        value_ops[name] = metric_ops

    if update_ops:
      update_ops = control_flow_ops.group(*update_ops)
    else:
      update_ops = None

    return update_ops, value_ops 
Example #15
Source File: handler.py    From deep_image_model with Apache License 2.0
def _serve_runs(self, unused_query_params):
    """Return a JSON object about runs and tags.

    Returns a mapping from runs to tagType to list of tags for that run.

    Returns:
      {runName: {images: [tag1, tag2, tag3],
                 audio: [tag4, tag5, tag6],
                 scalars: [tagA, tagB, tagC],
                 histograms: [tagX, tagY, tagZ],
                 firstEventTimestamp: 123456.789}}
    """
    runs = self._multiplexer.Runs()
    for run_name, run_data in runs.items():
      try:
        run_data['firstEventTimestamp'] = self._multiplexer.FirstEventTimestamp(
            run_name)
      except ValueError:
        logging.warning('Unable to get first event timestamp for run %s',
                        run_name)
        run_data['firstEventTimestamp'] = None
    self.respond(runs, 'application/json') 
Example #16
Source File: basic_session_run_hooks.py    From deep_image_model with Apache License 2.0
def after_run(self, run_context, run_values):
    if np.isnan(run_values.results):
      failure_message = "Model diverged with loss = NaN."
      if self._fail_on_nan_loss:
        logging.error(failure_message)
        raise NanLossDuringTrainingError
      else:
        logging.warning(failure_message)
        # We don't raise an error but we request stop without an exception.
        run_context.request_stop() 
Example #17
Source File: handler.py    From deep_image_model with Apache License 2.0
def _serve_static_file(self, path):
    """Serves the static file located at the given path.

    Args:
      path: The path of the static file, relative to the tensorboard/ directory.
    """
    # Strip off the leading forward slash.
    orig_path = path.lstrip('/')
    if not self._path_is_safe(orig_path):
      logging.warning('path not safe: %s', orig_path)
      self.respond('Naughty naughty!', 'text/plain', 400)
      return
    # Resource loader wants a path relative to //WORKSPACE/tensorflow.
    path = os.path.join('tensorboard', orig_path)
    # Open the file and read it.
    try:
      contents = resource_loader.load_resource(path)
    except IOError:
      # For compatibility with the latest version of Bazel, we renamed bower
      # packages to use '_' rather than '-' in their package name.
      # This means that the directory structure is changed too.
      # So that all our recursive imports work, we need to modify incoming
      # requests to map onto the new directory structure.
      path = orig_path
      components = path.split('/')
      components[0] = components[0].replace('-', '_')
      path = ('/').join(components)
      # Bazel keeps all the external dependencies in //WORKSPACE/external,
      # and the resource loader wants a path relative to //WORKSPACE/tensorflow/.
      path = os.path.join('../external', path)
      try:
        contents = resource_loader.load_resource(path)
      except IOError:
        logging.info('path %s not found, sending 404', path)
        self.respond('Not found', 'text/plain', 404)
        return
    mimetype, content_encoding = mimetypes.guess_type(path)
    mimetype = mimetype or 'application/octet-stream'
    self.respond(contents, mimetype, expires=3600,
                 content_encoding=content_encoding) 
Example #18
Source File: supervisor.py    From ctw-baseline with MIT License
def _default_global_step_tensor(self):
    """Returns the global_step from the default graph.

    Returns:
      The global step `Tensor` or `None`.
    """
    try:
      gs = ops.get_default_graph().get_tensor_by_name("global_step:0")
      if gs.dtype.base_dtype in [dtypes.int32, dtypes.int64]:
        return gs
      else:
        logging.warning("Found 'global_step' is not an int type: %s", gs.dtype)
        return None
    except KeyError:
      return None 
Example #19
Source File: handler.py    From deep_image_model with Apache License 2.0
def _setup_data_handlers(self):
    self.data_handlers = {
        DATA_PREFIX + LOGDIR_ROUTE: self._serve_logdir,
        DATA_PREFIX + SCALARS_ROUTE: self._serve_scalars,
        DATA_PREFIX + GRAPH_ROUTE: self._serve_graph,
        DATA_PREFIX + RUN_METADATA_ROUTE: self._serve_run_metadata,
        DATA_PREFIX + HISTOGRAMS_ROUTE: self._serve_histograms,
        DATA_PREFIX + COMPRESSED_HISTOGRAMS_ROUTE:
            self._serve_compressed_histograms,
        DATA_PREFIX + IMAGES_ROUTE: self._serve_images,
        DATA_PREFIX + INDIVIDUAL_IMAGE_ROUTE: self._serve_image,
        DATA_PREFIX + AUDIO_ROUTE: self._serve_audio,
        DATA_PREFIX + INDIVIDUAL_AUDIO_ROUTE: self._serve_individual_audio,
        DATA_PREFIX + RUNS_ROUTE: self._serve_runs,
        '/app.js': self._serve_js
    }

    # Serve the routes from the registered plugins using their name as the route
    # prefix. For example if plugin z has two routes /a and /b, they will be
    # served as /data/plugin/z/a and /data/plugin/z/b.
    for name in REGISTERED_PLUGINS:
      try:
        plugin = REGISTERED_PLUGINS[name]
        plugin_handlers = plugin.get_plugin_handlers(
            self._multiplexer.RunPaths(), self._logdir)
      except Exception as e:  # pylint: disable=broad-except
        logging.warning('Plugin %s failed. Exception: %s', name, str(e))
        continue
      for route, handler in plugin_handlers.items():
        path = DATA_PREFIX + PLUGIN_PREFIX + '/' + name + route
        self.data_handlers[path] = functools.partial(handler, self) 
Example #20
Source File: ops.py    From deep_image_model with Apache License 2.0
def VARIABLES(cls):  # pylint: disable=no-self-argument
    logging.warning("VARIABLES collection name is deprecated, "
                    "please use GLOBAL_VARIABLES instead; "
                    "VARIABLES will be removed after 2017-03-02.")
    return cls.GLOBAL_VARIABLES 
Example #21
Source File: event_multiplexer.py    From deep_image_model with Apache License 2.0
def AddRun(self, path, name=None):
    """Add a run to the multiplexer.

    If the name is not specified, it is the same as the path.

    If a run by that name exists, and we are already watching the right path,
      do nothing. If we are watching a different path, replace the event
      accumulator.

    If `Reload` has been called, it will `Reload` the newly created
    accumulators.

    Args:
      path: Path to the event files (or event directory) for given run.
      name: Name of the run to add. If not provided, is set to path.

    Returns:
      The `EventMultiplexer`.
    """
    if name is None or name == '':
      name = path
    accumulator = None
    with self._accumulators_mutex:
      if name not in self._accumulators or self._paths[name] != path:
        if name in self._paths and self._paths[name] != path:
          # TODO(danmane) - Make it impossible to overwrite an old path with
          # a new path (just give the new path a distinct name)
          logging.warning('Conflict for name %s: old path %s, new path %s',
                          name, self._paths[name], path)
        logging.info('Constructing EventAccumulator for %s', path)
        accumulator = event_accumulator.EventAccumulator(
            path,
            size_guidance=self._size_guidance,
            purge_orphaned_data=self.purge_orphaned_data)
        self._accumulators[name] = accumulator
        self._paths[name] = path
    if accumulator:
      if self._reload_called:
        accumulator.Reload()
    return self 
Example #22
Source File: supervisor.py    From deep_image_model with Apache License 2.0
def _default_global_step_tensor(self):
    """Returns the global_step from the default graph.

    Returns:
      The global step `Tensor` or `None`.
    """
    try:
      gs = ops.get_default_graph().get_tensor_by_name("global_step:0")
      if gs.dtype.base_dtype in [dtypes.int32, dtypes.int64]:
        return gs
      else:
        logging.warning("Found 'global_step' is not an int type: %s", gs.dtype)
        return None
    except KeyError:
      return None 
Example #23
Source File: experimental.py    From auto-alt-text-lambda-api with MIT License
def experimental(func):
  """Decorator for marking functions or methods experimental.

  This decorator logs an experimental warning whenever the decorated function is
  called. It has the following format:

    <function> (from <module>) is experimental and may change or be removed at
    any time, and without warning.

  <function> will include the class name if it is a method.

  It also edits the docstring of the function: ' (experimental)' is appended
  to the first line of the docstring and a notice is prepended to the rest of
  the docstring.

  Args:
    func: A function or method to mark experimental.

  Returns:
    Decorated function or method.
  """
  decorator_utils.validate_callable(func, 'experimental')
  @functools.wraps(func)
  def new_func(*args, **kwargs):
    logging.warning(
        '%s (from %s) is experimental and may change or be removed at '
        'any time, and without warning.',
        decorator_utils.get_qualified_name(func), func.__module__)
    return func(*args, **kwargs)
  new_func.__doc__ = _add_experimental_function_notice_to_docstring(
      func.__doc__)
  return new_func 
Example #24
Source File: monitors.py    From auto-alt-text-lambda-api with MIT License
def every_n_step_end(self, step, outputs):
    super(NanLoss, self).every_n_step_end(step, outputs)
    if np.isnan(_extract_output(outputs, self._loss_tensor)):
      failure_message = "Model diverged with loss = NaN."
      if self._fail_on_nan_loss:
        logging.error(failure_message)
        raise NanLossDuringTrainingError
      else:
        logging.warning(failure_message)
        # We don't raise an error but we return "should stop" so we stop, but
        # without an exception.
        return True 
Example #25
Source File: plugin.py    From auto-alt-text-lambda-api with MIT License
def _get_reader_for_run(self, run):
    if run in self.readers:
      return self.readers[run]

    config = self._configs[run]
    reader = None
    if config.model_checkpoint_path:
      try:
        reader = NewCheckpointReader(config.model_checkpoint_path)
      except Exception:  # pylint: disable=broad-except
        logging.warning('Failed reading %s', config.model_checkpoint_path)
    self.readers[run] = reader
    return reader 
Example #26
Source File: handler.py    From auto-alt-text-lambda-api with MIT License
def _serve_static_file(self, path):
    """Serves the static file located at the given path.

    Args:
      path: The path of the static file, relative to the tensorboard/ directory.
    """
    # Strip off the leading forward slash.
    orig_path = path.lstrip('/')
    if not self._path_is_safe(orig_path):
      logging.warning('path not safe: %s', orig_path)
      self.respond('Naughty naughty!', 'text/plain', 400)
      return
    # Resource loader wants a path relative to //WORKSPACE/tensorflow.
    path = os.path.join('tensorboard', orig_path)
    # Open the file and read it.
    try:
      contents = resource_loader.load_resource(path)
    except IOError:
      # For compatibility with the latest version of Bazel, we renamed bower
      # packages to use '_' rather than '-' in their package name.
      # This means that the directory structure is changed too.
      # So that all our recursive imports work, we need to modify incoming
      # requests to map onto the new directory structure.
      path = orig_path
      components = path.split('/')
      components[0] = components[0].replace('-', '_')
      path = ('/').join(components)
      # Bazel keeps all the external dependencies in //WORKSPACE/external,
      # and the resource loader wants a path relative to //WORKSPACE/tensorflow/.
      path = os.path.join('../external', path)
      try:
        contents = resource_loader.load_resource(path)
      except IOError:
        logging.info('path %s not found, sending 404', path)
        self.respond('Not found', 'text/plain', 404)
        return
    mimetype, content_encoding = mimetypes.guess_type(path)
    mimetype = mimetype or 'application/octet-stream'
    self.respond(contents, mimetype, expires=3600,
                 content_encoding=content_encoding) 
Example #27
Source File: handler.py    From auto-alt-text-lambda-api with MIT License
def _setup_data_handlers(self):
    self.data_handlers = {
        DATA_PREFIX + LOGDIR_ROUTE: self._serve_logdir,
        DATA_PREFIX + SCALARS_ROUTE: self._serve_scalars,
        DATA_PREFIX + GRAPH_ROUTE: self._serve_graph,
        DATA_PREFIX + RUN_METADATA_ROUTE: self._serve_run_metadata,
        DATA_PREFIX + HISTOGRAMS_ROUTE: self._serve_histograms,
        DATA_PREFIX + COMPRESSED_HISTOGRAMS_ROUTE:
            self._serve_compressed_histograms,
        DATA_PREFIX + IMAGES_ROUTE: self._serve_images,
        DATA_PREFIX + INDIVIDUAL_IMAGE_ROUTE: self._serve_image,
        DATA_PREFIX + AUDIO_ROUTE: self._serve_audio,
        DATA_PREFIX + INDIVIDUAL_AUDIO_ROUTE: self._serve_individual_audio,
        DATA_PREFIX + RUNS_ROUTE: self._serve_runs,
        '/app.js': self._serve_js
    }

    # Serve the routes from the registered plugins using their name as the route
    # prefix. For example if plugin z has two routes /a and /b, they will be
    # served as /data/plugin/z/a and /data/plugin/z/b.
    for name in self._registered_plugins:
      try:
        plugin = self._registered_plugins[name]
        plugin_handlers = plugin.get_plugin_handlers(
            self._multiplexer.RunPaths(), self._logdir)
      except Exception as e:  # pylint: disable=broad-except
        logging.warning('Plugin %s failed. Exception: %s', name, str(e))
        continue
      for route, handler in plugin_handlers.items():
        path = DATA_PREFIX + PLUGIN_PREFIX + '/' + name + route
        self.data_handlers[path] = functools.partial(handler, self) 
Example #28
Source File: ops.py    From auto-alt-text-lambda-api with MIT License
def VARIABLES(cls):  # pylint: disable=no-self-argument
    logging.warning("VARIABLES collection name is deprecated, "
                    "please use GLOBAL_VARIABLES instead; "
                    "VARIABLES will be removed after 2017-03-02.")
    return cls.GLOBAL_VARIABLES 
Example #29
Source File: event_multiplexer.py    From auto-alt-text-lambda-api with MIT License
def AddRun(self, path, name=None):
    """Add a run to the multiplexer.

    If the name is not specified, it is the same as the path.

    If a run by that name exists, and we are already watching the right path,
      do nothing. If we are watching a different path, replace the event
      accumulator.

    If `Reload` has been called, it will `Reload` the newly created
    accumulators.

    Args:
      path: Path to the event files (or event directory) for given run.
      name: Name of the run to add. If not provided, is set to path.

    Returns:
      The `EventMultiplexer`.
    """
    if name is None or name == '':
      name = path
    accumulator = None
    with self._accumulators_mutex:
      if name not in self._accumulators or self._paths[name] != path:
        if name in self._paths and self._paths[name] != path:
          # TODO(danmane) - Make it impossible to overwrite an old path with
          # a new path (just give the new path a distinct name)
          logging.warning('Conflict for name %s: old path %s, new path %s',
                          name, self._paths[name], path)
        logging.info('Constructing EventAccumulator for %s', path)
        accumulator = event_accumulator.EventAccumulator(
            path,
            size_guidance=self._size_guidance,
            purge_orphaned_data=self.purge_orphaned_data)
        self._accumulators[name] = accumulator
        self._paths[name] = path
    if accumulator:
      if self._reload_called:
        accumulator.Reload()
    return self 
Example #30
Source File: supervisor.py    From auto-alt-text-lambda-api with MIT License
def _default_global_step_tensor(self):
    """Returns the global_step from the default graph.

    Returns:
      The global step `Tensor` or `None`.
    """
    try:
      gs = ops.get_default_graph().get_tensor_by_name("global_step:0")
      if gs.dtype.base_dtype in [dtypes.int32, dtypes.int64]:
        return gs
      else:
        logging.warning("Found 'global_step' is not an int type: %s", gs.dtype)
        return None
    except KeyError:
      return None
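Whether any of the warnings above actually appear depends on the module's verbosity threshold. tf_logging re-exports the standard level constants (DEBUG, INFO, WARN, ERROR, FATAL) and provides set_verbosity()/get_verbosity(); a minimal sketch:

from tensorflow.python.platform import tf_logging as logging

logging.set_verbosity(logging.WARN)    # WARNING and above are emitted
logging.warning('this message is shown')

logging.set_verbosity(logging.ERROR)   # WARNING is now below the threshold
logging.warning('this message is suppressed')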