Python tensorflow.python.util.compat.as_bytes() Examples

The following are 30 code examples of tensorflow.python.util.compat.as_bytes(), taken from open-source projects. The source file and project for each example are noted above it. You may also want to check out all available functions/classes of the module tensorflow.python.util.compat.
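Before the examples, a minimal sketch of the conversion as_bytes performs may help; this assumes Python 3 and the UTF-8 default encoding (the real implementation in tensorflow/python/util/compat.py also handles Python 2 unicode):

def as_bytes_sketch(bytes_or_text, encoding='utf-8'):
    """Converts str to bytes using `encoding`; bytes pass through unchanged."""
    if isinstance(bytes_or_text, str):
        return bytes_or_text.encode(encoding)
    if isinstance(bytes_or_text, bytes):
        return bytes_or_text
    raise TypeError('Expected str or bytes, got %r' % (bytes_or_text,))

assert as_bytes_sketch('loc:@a') == b'loc:@a'   # str is encoded
assert as_bytes_sketch(b'loc:@a') == b'loc:@a'  # bytes pass through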
Example #1
Source File: ops.py    From auto-alt-text-lambda-api with MIT License
def colocation_groups(self):
    """Returns the list of colocation groups of the op."""
    default_colocation_group = [compat.as_bytes("loc:@%s" %
                                                self._node_def.name)]
    if "_class" not in self._node_def.attr:
      # This op has no explicit colocation group, so it is itself its
      # own root of a colocation group.
      return default_colocation_group

    attr_groups = [class_name
                   for class_name in self.get_attr("_class")
                   if class_name.startswith(b"loc:@")]

    # If there are no colocation groups in the explicit _class field,
    # return the default colocation group.
    return attr_groups if attr_groups else default_colocation_group 
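A hedged usage sketch of the method above, assuming a TF 1.x installation in graph mode: every op belongs to at least its own colocation group, named with the byte string b"loc:@<op_name>".

import tensorflow as tf  # assumes TF 1.x

with tf.Graph().as_default():
    a = tf.constant(1.0, name="a")
    # With no explicit colocation, the op is the root of its own group.
    print(a.op.colocation_groups())  # expected: [b'loc:@a']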
Example #2
Source File: file_io.py    From lambda-packs with MIT License
def file_exists(filename):
  """Determines whether a path exists or not.

  Args:
    filename: string, a path

  Returns:
    True if the path exists, whether it's a file or a directory.
    False if the path does not exist and there are no filesystem errors.

  Raises:
    errors.OpError: Propagates any errors reported by the FileSystem API.
  """
  try:
    with errors.raise_exception_on_not_ok_status() as status:
      pywrap_tensorflow.FileExists(compat.as_bytes(filename), status)
  except errors.NotFoundError:
    return False
  return True 
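A hedged usage sketch, assuming TF 1.x. file_io is an internal module; the same call is exposed publicly as tf.gfile.Exists.

from tensorflow.python.lib.io import file_io  # internal module, TF 1.x

print(file_io.file_exists('/tmp'))           # True for files and directories alike
print(file_io.file_exists('/no/such/path'))  # False rather than raising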
Example #3
Source File: file_io.py    From auto-alt-text-lambda-api with MIT License
def get_matching_files(filename):
  """Returns a list of files that match the given pattern.

  Args:
    filename: string, the pattern

  Returns:
    A list of strings containing filenames that match the given pattern.

  Raises:
    errors.OpError: If there are filesystem / directory listing errors.
  """
  with errors.raise_exception_on_not_ok_status() as status:
    # Convert each element to string, since the return values of the
    # vector of string should be interpreted as strings, not bytes.
    return [compat.as_str_any(matching_filename)
            for matching_filename in pywrap_tensorflow.GetMatchingFiles(
                compat.as_bytes(filename), status)] 
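A hedged usage sketch, assuming TF 1.x (public alias: tf.gfile.Glob). The pattern crosses into C++ as bytes, but the matches come back converted to str.

from tensorflow.python.lib.io import file_io  # internal module, TF 1.x

for name in file_io.get_matching_files('/tmp/*.txt'):  # hypothetical pattern
    print(type(name), name)  # <class 'str'> for each match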
Example #4
Source File: file_io.py    From lambda-packs with MIT License
def list_directory(dirname):
  """Returns a list of entries contained within a directory.

  The list is in arbitrary order. It does not contain the special entries "."
  and "..".

  Args:
    dirname: string, path to a directory

  Returns:
    [filename1, filename2, ... filenameN] as strings

  Raises:
    errors.NotFoundError: If the directory does not exist.
  """
  if not is_directory(dirname):
    raise errors.NotFoundError(None, None, "Could not find directory")
  with errors.raise_exception_on_not_ok_status() as status:
    # Convert each element to string, since the return values of the
    # vector of string should be interpreted as strings, not bytes.
    return [
        compat.as_str_any(filename)
        for filename in pywrap_tensorflow.GetChildren(
            compat.as_bytes(dirname), status)
    ] 
Example #5
Source File: file_io.py    From lambda-packs with MIT License
def stat(filename):
  """Returns file statistics for a given path.

  Args:
    filename: string, path to a file

  Returns:
    FileStatistics struct that contains information about the path

  Raises:
    errors.OpError: If the operation fails.
  """
  file_statistics = pywrap_tensorflow.FileStatistics()
  with errors.raise_exception_on_not_ok_status() as status:
    pywrap_tensorflow.Stat(compat.as_bytes(filename), file_statistics, status)
    return file_statistics 
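A hedged usage sketch, assuming TF 1.x; the returned FileStatistics struct exposes at least length, mtime_nsec, and is_directory.

from tensorflow.python.lib.io import file_io  # internal module, TF 1.x

stats = file_io.stat('/etc/hosts')  # hypothetical existing file
print(stats.length, stats.mtime_nsec, stats.is_directory)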
Example #6
Source File: export.py    From lambda-packs with MIT License
def get_timestamped_export_dir(export_dir_base):
  """Builds a path to a new subdirectory within the base directory.

  Each export is written into a new subdirectory named using the
  current time.  This guarantees monotonically increasing version
  numbers even across multiple runs of the pipeline.
  The timestamp used is the number of seconds since epoch UTC.

  Args:
    export_dir_base: A string containing a directory to write the exported
        graph and checkpoints.
  Returns:
    The full path of the new subdirectory (which is not actually created yet).
  """
  export_timestamp = int(time.time())

  export_dir = os.path.join(
      compat.as_bytes(export_dir_base),
      compat.as_bytes(str(export_timestamp)))
  return export_dir 
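A plain-Python sketch of why both components go through as_bytes: in Python 3, os.path.join refuses to mix str and bytes, so normalizing every component to bytes keeps the join well defined regardless of what the caller passed in.

import os
import time

export_dir_base = '/tmp/my_model/export'  # hypothetical base directory
export_dir = os.path.join(
    export_dir_base.encode('utf-8'),        # what as_bytes does to a str
    str(int(time.time())).encode('utf-8'))  # timestamped subdirectory name
print(export_dir)  # e.g. b'/tmp/my_model/export/1489619207'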
Example #7
Source File: file_io.py    From auto-alt-text-lambda-api with MIT License
def list_directory(dirname):
  """Returns a list of entries contained within a directory.

  The list is in arbitrary order. It does not contain the special entries "."
  and "..".

  Args:
    dirname: string, path to a directory

  Returns:
    [filename1, filename2, ... filenameN] as strings

  Raises:
    errors.NotFoundError: If the directory does not exist.
  """
  if not is_directory(dirname):
    raise errors.NotFoundError(None, None, "Could not find directory")
  with errors.raise_exception_on_not_ok_status() as status:
    # Convert each element to string, since the return values of the
    # vector of string should be interpreted as strings, not bytes.
    return [
        compat.as_str_any(filename)
        for filename in pywrap_tensorflow.GetChildren(
            compat.as_bytes(dirname), status)
    ] 
Example #8
Source File: file_io.py    From auto-alt-text-lambda-api with MIT License
def stat(filename):
  """Returns file statistics for a given path.

  Args:
    filename: string, path to a file

  Returns:
    FileStatistics struct that contains information about the path

  Raises:
    errors.OpError: If the operation fails.
  """
  file_statistics = pywrap_tensorflow.FileStatistics()
  with errors.raise_exception_on_not_ok_status() as status:
    pywrap_tensorflow.Stat(compat.as_bytes(filename), file_statistics, status)
    return file_statistics 
Example #9
Source File: experiment.py    From lambda-packs with MIT License
def _maybe_export(self, eval_result, checkpoint_path=None):
    """Export the Estimator using export_fn, if defined."""
    export_dir_base = os.path.join(
        compat.as_bytes(self._estimator.model_dir),
        compat.as_bytes("export"))

    export_results = []
    for strategy in self._export_strategies:
      export_results.append(
          strategy.export(
              self._estimator,
              os.path.join(
                  compat.as_bytes(export_dir_base),
                  compat.as_bytes(strategy.name)),
              checkpoint_path=checkpoint_path,
              eval_result=eval_result))

    return export_results 
Example #10
Source File: saved_model_export_utils.py    From lambda-packs with MIT License
def get_timestamped_export_dir(export_dir_base):
  """Builds a path to a new subdirectory within the base directory.

  Each export is written into a new subdirectory named using the
  current time.  This guarantees monotonically increasing version
  numbers even across multiple runs of the pipeline.
  The timestamp used is the number of seconds since epoch UTC.

  Args:
    export_dir_base: A string containing a directory to write the exported
        graph and checkpoints.
  Returns:
    The full path of the new subdirectory (which is not actually created yet).
  """
  export_timestamp = int(time.time())

  export_dir = os.path.join(
      compat.as_bytes(export_dir_base),
      compat.as_bytes(str(export_timestamp)))
  return export_dir


# create a simple parser that pulls the export_version from the directory. 
Example #11
Source File: pywrap_tensorflow.py    From auto-alt-text-lambda-api with MIT License
def has_tensor(self, tensor_str):
      from tensorflow.python.util import compat
      return self._HasTensor(compat.as_bytes(tensor_str)) 
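A hedged usage sketch, assuming TF 1.x: NewCheckpointReader (see Example #23 below) returns a reader whose methods accept str names and convert them with as_bytes as shown above. The checkpoint path and variable name here are hypothetical.

from tensorflow.python import pywrap_tensorflow  # internal module, TF 1.x

reader = pywrap_tensorflow.NewCheckpointReader('/tmp/model.ckpt')  # hypothetical path
if reader.has_tensor('dense/kernel'):  # hypothetical variable name
    print(reader.get_tensor('dense/kernel').shape)  # numpy array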
Example #12
Source File: ops.py    From auto-alt-text-lambda-api with MIT License
def _NodeDef(op_type, name, device=None, attrs=None):
  """Create a NodeDef proto.

  Args:
    op_type: Value for the "op" attribute of the NodeDef proto.
    name: Value for the "name" attribute of the NodeDef proto.
    device: string, device, or function from NodeDef to string.
      Value for the "device" attribute of the NodeDef proto.
    attrs: Optional dictionary where the key is the attribute name (a string)
      and the value is the respective "attr" attribute of the NodeDef proto (an
      AttrValue).

  Returns:
    A node_def_pb2.NodeDef protocol buffer.
  """
  node_def = node_def_pb2.NodeDef()
  node_def.op = compat.as_bytes(op_type)
  node_def.name = compat.as_bytes(name)
  if attrs is not None:
    for k, v in six.iteritems(attrs):
      node_def.attr[k].CopyFrom(v)
  if device is not None:
    if callable(device):
      node_def.device = device(node_def)
    else:
      node_def.device = _device_string(device)
  return node_def


# Copied from core/framework/node_def_util.cc
# TODO(mrry,josh11b): Consolidate this validation in C++ code. 
Example #13
Source File: tensor_util.py    From auto-alt-text-lambda-api with MIT License
def SlowAppendObjectArrayToTensorProto(tensor_proto, proto_values):
    tensor_proto.string_val.extend([compat.as_bytes(x) for x in proto_values]) 
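A hedged sketch of the underlying pattern: the string_val field of a TensorProto holds bytes, so every Python str must be encoded before it is appended.

from tensorflow.core.framework import tensor_pb2, types_pb2

proto = tensor_pb2.TensorProto(dtype=types_pb2.DT_STRING)
proto.string_val.extend(s.encode('utf-8') for s in ['a', 'b'])
print(list(proto.string_val))  # [b'a', b'b']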
Example #14
Source File: event_file_writer.py    From auto-alt-text-lambda-api with MIT License
def __init__(self, logdir, max_queue=10, flush_secs=120):
    """Creates a `EventFileWriter` and an event file to write to.

    On construction the summary writer creates a new event file in `logdir`.
    This event file will contain `Event` protocol buffers, which are written to
    disk via the add_event method.

    The other arguments to the constructor control the asynchronous writes to
    the event file:

    *  `flush_secs`: How often, in seconds, to flush the added summaries
       and events to disk.
    *  `max_queue`: Maximum number of summaries or events pending to be
       written to disk before one of the 'add' calls blocks.

    Args:
      logdir: A string. Directory where event file will be written.
      max_queue: Integer. Size of the queue for pending events and summaries.
      flush_secs: Number. How often, in seconds, to flush the
        pending events and summaries to disk.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
      gfile.MakeDirs(self._logdir)
    self._event_queue = six.moves.queue.Queue(max_queue)
    self._ev_writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(self._logdir, "events")))
    self._closed = False
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      flush_secs)

    self._worker.start() 
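A hedged usage sketch; the module path is an assumption, since EventFileWriter has moved between TF 1.x releases.

import time

from tensorflow.core.util import event_pb2
from tensorflow.python.summary.writer.event_file_writer import EventFileWriter  # path assumed

writer = EventFileWriter('/tmp/logs')  # hypothetical log directory
writer.add_event(event_pb2.Event(wall_time=time.time(), step=1))
writer.close()  # flushes the queue and closes the events file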
Example #15
Source File: tf_record.py    From auto-alt-text-lambda-api with MIT License
def tf_record_iterator(path, options=None):
  """An iterator that read the records from a TFRecords file.

  Args:
    path: The path to the TFRecords file.
    options: (optional) A TFRecordOptions object.

  Yields:
    Strings.

  Raises:
    IOError: If `path` cannot be opened for reading.
  """
  compression_type = TFRecordOptions.get_compression_type_string(options)
  with errors.raise_exception_on_not_ok_status() as status:
    reader = pywrap_tensorflow.PyRecordReader_New(
        compat.as_bytes(path), 0, compat.as_bytes(compression_type), status)

  if reader is None:
    raise IOError("Could not open %s." % path)
  while True:
    try:
      with errors.raise_exception_on_not_ok_status() as status:
        reader.GetNext(status)
    except errors.OutOfRangeError:
      break
    yield reader.record()
  reader.Close() 
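A hedged usage sketch, assuming TF 1.x (public alias: tf.python_io.tf_record_iterator). Each yielded record is a byte string.

from tensorflow.python.lib.io import tf_record  # internal module, TF 1.x

for record in tf_record.tf_record_iterator('/tmp/data.tfrecord'):  # hypothetical file
    print(len(record))  # length of the raw serialized record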
Example #16
Source File: file_io.py    From auto-alt-text-lambda-api with MIT License
def is_directory(dirname):
  """Returns whether the path is a directory or not.

  Args:
    dirname: string, path to a potential directory

  Returns:
    True, if the path is a directory; False otherwise
  """
  try:
    status = pywrap_tensorflow.TF_NewStatus()
    return pywrap_tensorflow.IsDirectory(compat.as_bytes(dirname), status)
  finally:
    pywrap_tensorflow.TF_DeleteStatus(status) 
Example #17
Source File: file_io.py    From auto-alt-text-lambda-api with MIT License
def delete_recursively(dirname):
  """Deletes everything under dirname recursively.

  Args:
    dirname: string, a path to a directory

  Raises:
    errors.OpError: If the operation fails.
  """
  with errors.raise_exception_on_not_ok_status() as status:
    pywrap_tensorflow.DeleteRecursively(compat.as_bytes(dirname), status) 
Example #18
Source File: event_file_loader.py    From auto-alt-text-lambda-api with MIT License
def __init__(self, file_path):
    if file_path is None:
      raise ValueError('A file path is required')
    file_path = resource_loader.readahead_file_path(file_path)
    logging.debug('Opening a record reader pointing at %s', file_path)
    with errors.raise_exception_on_not_ok_status() as status:
      self._reader = pywrap_tensorflow.PyRecordReader_New(
          compat.as_bytes(file_path), 0, compat.as_bytes(''), status)
    # Store it for logging purposes.
    self._file_path = file_path
    if not self._reader:
      raise IOError('Failed to open a record reader pointing to %s' % file_path) 
Example #19
Source File: event_file_loader.py    From lambda-packs with MIT License
def __init__(self, file_path):
    if file_path is None:
      raise ValueError('A file path is required')
    file_path = resource_loader.readahead_file_path(file_path)
    logging.debug('Opening a record reader pointing at %s', file_path)
    with errors.raise_exception_on_not_ok_status() as status:
      self._reader = pywrap_tensorflow.PyRecordReader_New(
          compat.as_bytes(file_path), 0, compat.as_bytes(''), status)
    # Store it for logging purposes.
    self._file_path = file_path
    if not self._reader:
      raise IOError('Failed to open a record reader pointing to %s' % file_path) 
Example #20
Source File: session.py    From auto-alt-text-lambda-api with MIT License
def __init__(self, graph, fetches, feeds):
    """Creates a fetch handler.

    Args:
      graph: Graph of the fetches.   Used to check for fetchability
        and to convert all fetches to tensors or ops as needed.
      fetches: An arbitrary fetch structure: singleton, list, tuple,
        namedtuple, or dict.
      feeds: A feed dict where keys are fully resolved tensor names.
    """
    with graph.as_default():
      self._fetch_mapper = _FetchMapper.for_fetch(fetches)
    self._fetches = []
    self._targets = []
    self._feeds = feeds
    self._ops = []
    self._fetch_handles = {}
    for fetch in self._fetch_mapper.unique_fetches():
      fetch_name = compat.as_bytes(fetch.name)
      if isinstance(fetch, ops.Operation):
        self._assert_fetchable(graph, fetch)
        self._targets.append(fetch_name)
        self._ops.append(True)
      else:
        self._assert_fetchable(graph, fetch.op)
        self._fetches.append(fetch_name)
        self._ops.append(False)
      # Remember the fetch if it is for a tensor handle.
      if isinstance(fetch, ops.Tensor) and fetch.op.type == 'GetSessionHandle':
        self._fetch_handles[fetch_name] = fetch.op.inputs[0].dtype
    self._final_fetches = [x for x in self._fetches if x not in feeds] 
Example #21
Source File: pywrap_tensorflow.py    From auto-alt-text-lambda-api with MIT License
def get_tensor(self, tensor_str):
      from tensorflow.python.framework import errors
      with errors.raise_exception_on_not_ok_status() as status:
        from tensorflow.python.util import compat
        return CheckpointReader_GetTensor(self, compat.as_bytes(tensor_str),
                                          status) 
Example #22
Source File: builder_impl.py    From auto-alt-text-lambda-api with MIT License
def _save_and_write_assets(self, assets_collection_to_add=None):
    """Saves asset to the meta graph and writes asset files to disk.

    Args:
      assets_collection_to_add: The collection where the asset paths are setup.
    """
    asset_source_filepath_list = self._maybe_save_assets(
        assets_collection_to_add)

    # Return if there are no assets to write.
    if len(asset_source_filepath_list) == 0:
      tf_logging.info("No assets to write.")
      return

    assets_destination_dir = os.path.join(
        compat.as_bytes(self._export_dir),
        compat.as_bytes(constants.ASSETS_DIRECTORY))

    if not file_io.file_exists(assets_destination_dir):
      file_io.recursive_create_dir(assets_destination_dir)

    # Copy each asset from source path to destination path.
    for asset_source_filepath in asset_source_filepath_list:
      asset_source_filename = os.path.basename(asset_source_filepath)

      asset_destination_filepath = os.path.join(
          compat.as_bytes(assets_destination_dir),
          compat.as_bytes(asset_source_filename))

      # Only copy the asset file to the destination if it does not already
      # exist. This is to ensure that an asset with the same name defined as
      # part of multiple graphs is only copied the first time.
      if not file_io.file_exists(asset_destination_filepath):
        file_io.copy(asset_source_filepath, asset_destination_filepath)

    tf_logging.info("Assets written to: %s", assets_destination_dir) 
Example #23
Source File: pywrap_tensorflow.py    From auto-alt-text-lambda-api with MIT License
def NewCheckpointReader(filepattern):
  from tensorflow.python.framework import errors
  with errors.raise_exception_on_not_ok_status() as status:
    from tensorflow.python.util import compat
    return CheckpointReader(compat.as_bytes(filepattern), status) 
Example #24
Source File: saved_model_test.py    From auto-alt-text-lambda-api with MIT License
def testAssets(self):
    export_dir = os.path.join(test.get_temp_dir(), "test_assets")
    builder = saved_model_builder.SavedModelBuilder(export_dir)

    with self.test_session(graph=ops.Graph()) as sess:
      self._init_and_validate_variable(sess, "v", 42)

      # Build an asset collection.
      ignored_filepath = os.path.join(
          compat.as_bytes(test.get_temp_dir()), compat.as_bytes("ignored.txt"))
      file_io.write_string_to_file(ignored_filepath, "will be ignored")

      asset_collection = self._build_asset_collection("hello42.txt",
                                                      "foo bar baz",
                                                      "asset_file_tensor")

      builder.add_meta_graph_and_variables(
          sess, ["foo"], assets_collection=asset_collection)

    # Save the SavedModel to disk.
    builder.save()

    with self.test_session(graph=ops.Graph()) as sess:
      foo_graph = loader.load(sess, ["foo"], export_dir)
      self._validate_asset_collection(export_dir, foo_graph.collection_def,
                                      "hello42.txt", "foo bar baz",
                                      "asset_file_tensor:0")
      ignored_asset_path = os.path.join(
          compat.as_bytes(export_dir),
          compat.as_bytes(constants.ASSETS_DIRECTORY),
          compat.as_bytes("ignored.txt"))
      self.assertFalse(file_io.file_exists(ignored_asset_path)) 
Example #25
Source File: logger.py    From HardRLWithYoutube with MIT License
def __init__(self, dir):
        os.makedirs(dir, exist_ok=True)
        self.dir = dir
        self.step = 1
        prefix = 'events'
        path = osp.join(osp.abspath(dir), prefix)
        import tensorflow as tf
        from tensorflow.python import pywrap_tensorflow
        from tensorflow.core.util import event_pb2
        from tensorflow.python.util import compat
        self.tf = tf
        self.event_pb2 = event_pb2
        self.pywrap_tensorflow = pywrap_tensorflow
        self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path)) 
Example #26
Source File: exporter.py    From lambda-packs with MIT License
def gfile_copy_callback(files_to_copy, export_dir_path):
  """Callback to copy files using `gfile.Copy` to an export directory.

  This method is used as the default `assets_callback` in `Exporter.init` to
  copy assets from the `assets_collection`. It can also be invoked directly to
  copy additional supplementary files into the export directory (in which case
  it is not a callback).

  Args:
    files_to_copy: A dictionary that maps original file paths to desired
      basename in the export directory.
    export_dir_path: Directory to copy the files to.
  """
  logging.info("Write assets into: %s using gfile_copy.", export_dir_path)
  gfile.MakeDirs(export_dir_path)
  for source_filepath, basename in files_to_copy.items():
    new_path = os.path.join(
        compat.as_bytes(export_dir_path), compat.as_bytes(basename))
    logging.info("Copying asset %s to path %s.", source_filepath, new_path)

    if gfile.Exists(new_path):
      # Guard against being restarted while copying assets, and the file
      # existing and being in an unknown state.
      # TODO(b/28676216): Do some file checks before deleting.
      logging.info("Removing file %s.", new_path)
      gfile.Remove(new_path)
    gfile.Copy(source_filepath, new_path) 
Example #27
Source File: saved_model_test.py    From auto-alt-text-lambda-api with MIT License
def _build_asset_collection(self, asset_file_name, asset_file_contents,
                              asset_file_tensor_name):
    asset_filepath = os.path.join(
        compat.as_bytes(test.get_temp_dir()), compat.as_bytes(asset_file_name))
    file_io.write_string_to_file(asset_filepath, asset_file_contents)
    asset_file_tensor = constant_op.constant(
        asset_filepath, name=asset_file_tensor_name)
    ops.add_to_collection(ops.GraphKeys.ASSET_FILEPATHS, asset_file_tensor)
    asset_collection = ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)
    return asset_collection 
Example #28
Source File: loader_impl.py    From auto-alt-text-lambda-api with MIT License
def _get_asset_tensors(export_dir, meta_graph_def_to_load):
  """Gets the asset tensors, if defined in the meta graph def to load.

  Args:
    export_dir: Directory where the SavedModel is located.
    meta_graph_def_to_load: The meta graph def from the SavedModel to be loaded.

  Returns:
    A dictionary of asset tensors, keyed by the name of the asset tensor. The
    value in the map corresponds to the absolute path of the asset file.
  """
  # Collection-def that may contain the assets key.
  collection_def = meta_graph_def_to_load.collection_def

  asset_tensor_dict = {}
  if constants.ASSETS_KEY in collection_def:
    # Location of the assets for SavedModel.
    assets_directory = os.path.join(
        compat.as_bytes(export_dir),
        compat.as_bytes(constants.ASSETS_DIRECTORY))
    assets_any_proto = collection_def[constants.ASSETS_KEY].any_list.value
    # Process each asset and add it to the asset tensor dictionary.
    for asset_any_proto in assets_any_proto:
      asset_proto = meta_graph_pb2.AssetFileDef()
      asset_any_proto.Unpack(asset_proto)
      asset_tensor_dict[asset_proto.tensor_info.name] = os.path.join(
          compat.as_bytes(assets_directory),
          compat.as_bytes(asset_proto.filename))
  return asset_tensor_dict 
Example #29
Source File: builder_impl.py    From auto-alt-text-lambda-api with MIT License
def save(self, as_text=False):
    """Writes a `SavedModel` protocol buffer to disk.

    The function writes the SavedModel protocol buffer to the export directory
    in serialized format.

    Args:
      as_text: Writes the SavedModel protocol buffer in text format to disk.

    Returns:
      The path to which the SavedModel protocol buffer was written.
    """
    if not file_io.file_exists(self._export_dir):
      file_io.recursive_create_dir(self._export_dir)

    if as_text:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
      file_io.write_string_to_file(path, str(self._saved_model))
    else:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
      file_io.write_string_to_file(path, self._saved_model.SerializeToString())
    tf_logging.info("SavedModel written to: %s", path)

    return path 
Example #30
Source File: hparam.py    From lambda-packs with MIT License
def to_proto(self, export_scope=None):  # pylint: disable=unused-argument
    """Converts a `HParams` object to a `HParamDef` protocol buffer.

    Args:
      export_scope: Optional `string`. Name scope to remove.

    Returns:
      A `HParamDef` protocol buffer.
    """
    hparam_proto = hparam_pb2.HParamDef()
    for name in self._hparam_types:
      # Parse the values.
      param_type, is_list = self._hparam_types.get(name, (None, None))
      kind = HParams._get_kind_name(param_type, is_list)

      if is_list:
        if kind.startswith('bytes'):
          v_list = [compat.as_bytes(v) for v in getattr(self, name)]
        else:
          v_list = [v for v in getattr(self, name)]
        getattr(hparam_proto.hparam[name], kind).value.extend(v_list)
      else:
        v = getattr(self, name)
        if kind.startswith('bytes'):
          v = compat.as_bytes(getattr(self, name))
        setattr(hparam_proto.hparam[name], kind, v)

    return hparam_proto
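A hedged usage sketch, assuming TF 1.x with contrib available: string-valued hparams are serialized through as_bytes into the bytes_value field of the HParamDef proto.

from tensorflow.contrib.training import HParams  # assumes TF 1.x contrib

hp = HParams(learning_rate=0.1, optimizer='adam')
proto = hp.to_proto()
print(proto.hparam['optimizer'].bytes_value)  # b'adam'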