Python tensorflow.python.lib.io.file_io.recursive_create_dir() Examples

The following are code examples of tensorflow.python.lib.io.file_io.recursive_create_dir(). Each example links to the original project and source file it was taken from. You may also want to check out all available functions/classes of the module tensorflow.python.lib.io.file_io.
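Before the examples, a minimal sketch of the function's core behavior (the path is hypothetical): like mkdir -p, it creates the target directory together with any missing parents, and because it goes through file_io it accepts the same filesystem schemes as the rest of the module, so gs:// paths work too.

from tensorflow.python.lib.io import file_io

# Creates /tmp/demo, /tmp/demo/a and /tmp/demo/a/b as needed; in recent
# TensorFlow versions the call also succeeds if the directory already
# exists, which is why the file_exists() guards in the examples below
# are defensive rather than strictly required.
file_io.recursive_create_dir('/tmp/demo/a/b')
assert file_io.is_directory('/tmp/demo/a/b')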
Example #1
Source File: config_util.py    From Live-feed-object-device-identification-using-Tensorflow-and-OpenCV with Apache License 2.0
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
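A hypothetical invocation of the function above, assuming the TensorFlow Object Detection API protos are importable; the config value and the output directory are made up:

from object_detection.protos import pipeline_pb2

pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
pipeline_config.model.ssd.num_classes = 3  # illustrative setting only
save_pipeline_config(pipeline_config, '/tmp/model_dir')
# /tmp/model_dir is created if missing and now contains pipeline.config.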
Example #2
Source File: task.py    From pydatalab with Apache License 2.0
def local_analysis(args):
  if args.analysis:
    # Already analyzed.
    return

  if not args.schema or not args.features:
    raise ValueError('Either --analysis, or both --schema and --features, must be provided.')

  tf_config = json.loads(os.environ.get('TF_CONFIG', '{}'))
  cluster_spec = tf_config.get('cluster', {})
  if len(cluster_spec.get('worker', [])) > 0:
    raise ValueError('If "schema" and "features" are provided, local analysis will run and ' +
                     'only BASIC scale-tier (no worker nodes) is supported.')

  if cluster_spec and not (args.schema.startswith('gs://') and args.features.startswith('gs://')):
    raise ValueError('Cloud trainer requires GCS paths for --schema and --features.')

  print('Running analysis.')
  schema = json.loads(file_io.read_file_to_string(args.schema).decode())
  features = json.loads(file_io.read_file_to_string(args.features).decode())
  args.analysis = os.path.join(args.job_dir, 'analysis')
  args.transform = True
  file_io.recursive_create_dir(args.analysis)
  feature_analysis.run_local_analysis(args.analysis, args.train, schema, features)
  print('Analysis done.') 
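For context, a hypothetical argparse.Namespace showing the flags this function reads and mutates; all paths are made up:

import argparse

args = argparse.Namespace(
    analysis=None,                       # empty, so the analysis branch runs
    schema='gs://bucket/schema.json',    # hypothetical GCS inputs
    features='gs://bucket/features.json',
    train='gs://bucket/train.csv',
    job_dir='gs://bucket/job',
    transform=False)
local_analysis(args)
# Afterwards args.analysis is 'gs://bucket/job/analysis' and args.transform is True.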
Example #3
Source File: test_training.py    From pydatalab with Apache License 2.0
def setUp(self):
    self._test_dir = tempfile.mkdtemp()

    self._analysis_output = os.path.join(self._test_dir, 'analysis_output')
    self._transform_output = os.path.join(self._test_dir, 'transform_output')
    self._train_output = os.path.join(self._test_dir, 'train_output')

    file_io.recursive_create_dir(self._analysis_output)
    file_io.recursive_create_dir(self._transform_output)
    file_io.recursive_create_dir(self._train_output)

    self._csv_train_filename = os.path.join(self._test_dir, 'train_csv_data.csv')
    self._csv_eval_filename = os.path.join(self._test_dir, 'eval_csv_data.csv')
    self._csv_predict_filename = os.path.join(self._test_dir, 'predict_csv_data.csv')
    self._schema_filename = os.path.join(self._test_dir, 'schema_file.json')
    self._features_filename = os.path.join(self._test_dir, 'features_file.json') 
Example #4
Source File: test_cloud_workflow.py    From pydatalab with Apache License 2.0
def setUp(self):
    random.seed(12321)
    self._local_dir = tempfile.mkdtemp()  # Local folder for temp files.
    self._gs_dir = 'gs://temp_pydatalab_test_%s' % uuid.uuid4().hex
    subprocess.check_call('gsutil mb %s' % self._gs_dir, shell=True)

    self._input_files = os.path.join(self._gs_dir, 'input_files')

    self._analysis_output = os.path.join(self._gs_dir, 'analysis_output')
    self._transform_output = os.path.join(self._gs_dir, 'transform_output')
    self._train_output = os.path.join(self._gs_dir, 'train_output')
    self._prediction_output = os.path.join(self._gs_dir, 'prediction_output')

    file_io.recursive_create_dir(self._input_files)

    self._csv_train_filename = os.path.join(self._input_files, 'train_csv_data.csv')
    self._csv_eval_filename = os.path.join(self._input_files, 'eval_csv_data.csv')
    self._csv_predict_filename = os.path.join(self._input_files, 'predict_csv_data.csv')
    self._schema_filename = os.path.join(self._input_files, 'schema_file.json')
    self._features_filename = os.path.join(self._input_files, 'features_file.json')

    self._image_files = None 
Example #5
Source File: config_util.py    From Gun-Detector with Apache License 2.0
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #6
Source File: saved_model_half_plus_two.py    From deep_image_model with Apache License 2.0
def _write_assets(assets_directory, assets_filename):
  """Writes asset files to be used with SavedModel for half plus two.

  Args:
    assets_directory: The directory to which the assets should be written.
    assets_filename: Name of the file to which the asset contents should be
        written.

  Returns:
    The path to which the assets file was written.
  """
  if not file_io.file_exists(assets_directory):
    file_io.recursive_create_dir(assets_directory)

  path = os.path.join(
      compat.as_bytes(assets_directory), compat.as_bytes(assets_filename))
  file_io.write_string_to_file(path, "asset-file-contents")
  return path 
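The compat.as_bytes() calls normalize both path components to bytes so that os.path.join() never mixes str and bytes under Python 3; a minimal sketch with made-up names:

import os
from tensorflow.python.util import compat

path = os.path.join(compat.as_bytes('/tmp/assets'), compat.as_bytes('vocab.txt'))
assert path == b'/tmp/assets/vocab.txt'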
Example #7
Source File: builder.py    From deep_image_model with Apache License 2.0
def __init__(self, export_dir):
    self._saved_model = saved_model_pb2.SavedModel()
    self._saved_model.saved_model_schema_version = (
        constants.SAVED_MODEL_SCHEMA_VERSION)

    self._export_dir = export_dir
    if file_io.file_exists(export_dir):
      raise AssertionError(
          "Export directory already exists. Please specify a different export "
          "directory.")

    file_io.recursive_create_dir(self._export_dir)

    # Boolean to track whether variables and assets corresponding to the
    # SavedModel have been saved. Specifically, the first meta graph to be added
    # MUST use the add_meta_graph_and_variables() API. Subsequent add operations
    # on the SavedModel MUST use the add_meta_graph() API which does not save
    # weights.
    self._has_saved_variables = False 
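For orientation, the typical life cycle around this constructor, sketched in TF 1.x session style; the export path and tags are hypothetical:

import tensorflow as tf

builder = tf.saved_model.builder.SavedModelBuilder('/tmp/export')  # must not exist yet
with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  # Per the comment above, the first meta graph must carry the variables.
  builder.add_meta_graph_and_variables(sess, [tf.saved_model.tag_constants.SERVING])
builder.save()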
Example #8
Source File: config_util.py    From ros_tensorflow with Apache License 2.0
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #9
Source File: config_util.py    From BMW-TensorFlow-Training-GUI with Apache License 2.0
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #10
Source File: builder_impl.py    From lambda-packs with MIT License
def __init__(self, export_dir):
    self._saved_model = saved_model_pb2.SavedModel()
    self._saved_model.saved_model_schema_version = (
        constants.SAVED_MODEL_SCHEMA_VERSION)

    self._export_dir = export_dir
    if file_io.file_exists(export_dir):
      raise AssertionError(
          "Export directory already exists. Please specify a different export "
          "directory: %s" % export_dir)

    file_io.recursive_create_dir(self._export_dir)

    # Boolean to track whether variables and assets corresponding to the
    # SavedModel have been saved. Specifically, the first meta graph to be added
    # MUST use the add_meta_graph_and_variables() API. Subsequent add operations
    # on the SavedModel MUST use the add_meta_graph() API which does not save
    # weights.
    self._has_saved_variables = False 
Example #11
Source File: config_util.py    From MAX-Object-Detector with Apache License 2.0
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #12
Source File: config_util.py    From g-tensorflow-models with Apache License 2.0
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #13
Source File: config_util.py    From models with Apache License 2.0
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #14
Source File: saved_model_half_plus_two.py    From jetson with MIT License
def _write_assets(assets_directory, assets_filename):
  """Writes asset files to be used with SavedModel for half plus two.

  Args:
    assets_directory: The directory to which the assets should be written.
    assets_filename: Name of the file to which the asset contents should be
        written.

  Returns:
    The path to which the assets file was written.
  """
  if not file_io.file_exists(assets_directory):
    file_io.recursive_create_dir(assets_directory)

  path = os.path.join(
      tf.compat.as_bytes(assets_directory), tf.compat.as_bytes(assets_filename))
  file_io.write_string_to_file(path, "asset-file-contents")
  return path 
Example #15
Source File: builder_impl.py    From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def __init__(self, export_dir):
    self._saved_model = saved_model_pb2.SavedModel()
    self._saved_model.saved_model_schema_version = (
        constants.SAVED_MODEL_SCHEMA_VERSION)

    self._export_dir = export_dir
    if file_io.file_exists(export_dir):
      raise AssertionError(
          "Export directory already exists. Please specify a different export "
          "directory: %s" % export_dir)

    file_io.recursive_create_dir(self._export_dir)

    # Boolean to track whether variables and assets corresponding to the
    # SavedModel have been saved. Specifically, the first meta graph to be added
    # MUST use the add_meta_graph_and_variables() API. Subsequent add operations
    # on the SavedModel MUST use the add_meta_graph() API which does not save
    # weights.
    self._has_saved_variables = False 
Example #16
Source File: config_util.py    From multilabel-image-classification-tensorflow with MIT License
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #17
Source File: builder_impl.py    From keras-lambda with MIT License
def __init__(self, export_dir):
    self._saved_model = saved_model_pb2.SavedModel()
    self._saved_model.saved_model_schema_version = (
        constants.SAVED_MODEL_SCHEMA_VERSION)

    self._export_dir = export_dir
    if file_io.file_exists(export_dir):
      raise AssertionError(
          "Export directory already exists. Please specify a different export "
          "directory: %s" % export_dir)

    file_io.recursive_create_dir(self._export_dir)

    # Boolean to track whether variables and assets corresponding to the
    # SavedModel have been saved. Specifically, the first meta graph to be added
    # MUST use the add_meta_graph_and_variables() API. Subsequent add operations
    # on the SavedModel MUST use the add_meta_graph() API which does not save
    # weights.
    self._has_saved_variables = False 
Example #18
Source File: task.py    From pydatalab with Apache License 2.0
def recursive_copy(src_dir, dest_dir):
  """Copy the contents of src_dir into the folder dest_dir.
  Args:
    src_dir: gcs or local path.
    dest_dir: gcs or local path.
  """

  file_io.recursive_create_dir(dest_dir)
  for file_name in file_io.list_directory(src_dir):
    old_path = os.path.join(src_dir, file_name)
    new_path = os.path.join(dest_dir, file_name)

    if file_io.is_directory(old_path):
      recursive_copy(old_path, new_path)
    else:
      file_io.copy(old_path, new_path, overwrite=True) 
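A hypothetical call; because file_io dispatches on the path scheme, the same function copies between any mix of local and gs:// locations:

recursive_copy('/tmp/model_out', 'gs://my-bucket/model_out')  # hypothetical paths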
Example #19
Source File: _job.py    From tensorfx with Apache License 2.0
def start(self):
    """Performs startup logic, including building graphs.
    """
    if self._config.master:
      # Save out job information for later reference alongside all other outputs.
      job_args = ' '.join(self._model_builder.args._args).replace(' --', '\n--').split('\n')
      job_info = {
        'config': self._config._env,
        'args': job_args
      }
      job_spec = yaml.safe_dump(job_info, default_flow_style=False)
      job_file = os.path.join(self._output, 'job.yaml')

      tfio.recursive_create_dir(self._output)
      tfio.write_string_to_file(job_file, job_spec)

      # Create a checkpoints directory. This is needed to ensure checkpoint restoration logic
      # can lookup an existing directory.
      tfio.recursive_create_dir(self.checkpoints_path)

    # Build the graphs that will be used during the course of the job.
    self._training, self._evaluation, self._prediction = \
      self._model_builder.build_graph_interfaces(self._inputs, self._config) 
Example #20
Source File: config_util.py    From vehicle_counting_tensorflow with MIT License
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #21
Source File: config_util.py    From Person-Detection-and-Tracking with MIT License
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text) 
Example #22
Source File: util.py    From pydatalab with Apache License 2.0
def _recursive_copy(src_dir, dest_dir):
  """Copy the contents of src_dir into the folder dest_dir.
  Args:
    src_dir: gcs or local path.
    dest_dir: gcs or local path.
  When called, dest_dir should exist.
  """
  src_dir = python_portable_string(src_dir)
  dest_dir = python_portable_string(dest_dir)

  file_io.recursive_create_dir(dest_dir)
  for file_name in file_io.list_directory(src_dir):
    old_path = os.path.join(src_dir, file_name)
    new_path = os.path.join(dest_dir, file_name)

    if file_io.is_directory(old_path):
      _recursive_copy(old_path, new_path)
    else:
      file_io.copy(old_path, new_path, overwrite=True) 
Example #23
Source File: builder_impl.py    From auto-alt-text-lambda-api with MIT License
def __init__(self, export_dir):
    self._saved_model = saved_model_pb2.SavedModel()
    self._saved_model.saved_model_schema_version = (
        constants.SAVED_MODEL_SCHEMA_VERSION)

    self._export_dir = export_dir
    if file_io.file_exists(export_dir):
      raise AssertionError(
          "Export directory already exists. Please specify a different export "
          "directory: %s" % export_dir)

    file_io.recursive_create_dir(self._export_dir)

    # Boolean to track whether variables and assets corresponding to the
    # SavedModel have been saved. Specifically, the first meta graph to be added
    # MUST use the add_meta_graph_and_variables() API. Subsequent add operations
    # on the SavedModel MUST use the add_meta_graph() API which does not save
    # weights.
    self._has_saved_variables = False 
Example #24
Source File: metadata_io_test.py    From transform with Apache License 2.0
def _write_schema_to_disk(self, basedir, schema_string):
    version_basedir = os.path.join(basedir, 'v1-json')

    # Write a proto by hand to disk
    file_io.recursive_create_dir(version_basedir)
    file_io.write_string_to_file(os.path.join(version_basedir, 'schema.json'),
                                 schema_string) 
Example #25
Source File: test_analyze.py    From pydatalab with Apache License 2.0
def test_numerics(self):
    test_folder = os.path.join(self._bucket_root, 'test_numerics')
    input_file_path = os.path.join(test_folder, 'input.csv')
    output_folder = os.path.join(test_folder, 'test_output')
    file_io.recursive_create_dir(output_folder)

    file_io.write_string_to_file(
      input_file_path,
      '\n'.join(['%s,%s,%s' % (i, 10 * i + 0.5, i) for i in range(100)]))

    schema = [{'name': 'col1', 'type': 'INTEGER'},
              {'name': 'col2', 'type': 'FLOAT'},
              {'name': 'col3', 'type': 'FLOAT'}]
    features = {'col1': {'transform': 'scale', 'source_column': 'col1'},
                'col2': {'transform': 'identity', 'source_column': 'col2'},
                'col3': {'transform': 'target'}}
    analyze.run_cloud_analysis(
        output_dir=output_folder,
        csv_file_pattern=input_file_path,
        bigquery_table=None,
        schema=schema,
        features=features)

    stats = json.loads(
        file_io.read_file_to_string(
            os.path.join(output_folder, analyze.constant.STATS_FILE)).decode())

    self.assertEqual(stats['num_examples'], 100)
    col = stats['column_stats']['col1']
    self.assertAlmostEqual(col['max'], 99.0)
    self.assertAlmostEqual(col['min'], 0.0)
    self.assertAlmostEqual(col['mean'], 49.5)

    col = stats['column_stats']['col2']
    self.assertAlmostEqual(col['max'], 990.5)
    self.assertAlmostEqual(col['min'], 0.5)
    self.assertAlmostEqual(col['mean'], 495.5) 
Example #26
Source File: builder_impl.py    From auto-alt-text-lambda-api with MIT License
def save(self, as_text=False):
    """Writes a `SavedModel` protocol buffer to disk.

    The function writes the SavedModel protocol buffer to the export directory
    in serialized format.

    Args:
      as_text: Writes the SavedModel protocol buffer in text format to disk.

    Returns:
      The path to which the SavedModel protocol buffer was written.
    """
    if not file_io.file_exists(self._export_dir):
      file_io.recursive_create_dir(self._export_dir)

    if as_text:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
      file_io.write_string_to_file(path, str(self._saved_model))
    else:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
      file_io.write_string_to_file(path, self._saved_model.SerializeToString())
    tf_logging.info("SavedModel written to: %s", path)

    return path 
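A brief sketch of the two output formats for a builder that already has a meta graph added; as_text=True swaps the binary saved_model.pb for a human-readable saved_model.pbtxt:

path = builder.save()               # <export_dir>/saved_model.pb (binary)
path = builder.save(as_text=True)   # <export_dir>/saved_model.pbtxt (text)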
Example #27
Source File: builder_impl.py    From auto-alt-text-lambda-api with MIT License
def _save_and_write_assets(self, assets_collection_to_add=None):
    """Saves asset to the meta graph and writes asset files to disk.

    Args:
      assets_collection_to_add: The collection where the asset paths are setup.
    """
    asset_source_filepath_list = self._maybe_save_assets(
        assets_collection_to_add)

    # Return if there are no assets to write.
    if len(asset_source_filepath_list) == 0:
      tf_logging.info("No assets to write.")
      return

    assets_destination_dir = os.path.join(
        compat.as_bytes(self._export_dir),
        compat.as_bytes(constants.ASSETS_DIRECTORY))

    if not file_io.file_exists(assets_destination_dir):
      file_io.recursive_create_dir(assets_destination_dir)

    # Copy each asset from source path to destination path.
    for asset_source_filepath in asset_source_filepath_list:
      asset_source_filename = os.path.basename(asset_source_filepath)

      asset_destination_filepath = os.path.join(
          compat.as_bytes(assets_destination_dir),
          compat.as_bytes(asset_source_filename))

      # Only copy the asset file to the destination if it does not already
      # exist. This is to ensure that an asset with the same name defined as
      # part of multiple graphs is only copied the first time.
      if not file_io.file_exists(asset_destination_filepath):
        file_io.copy(asset_source_filepath, asset_destination_filepath)

    tf_logging.info("Assets written to: %s", assets_destination_dir) 
Example #28
Source File: builder_impl.py    From lambda-packs with MIT License
def save(self, as_text=False):
    """Writes a `SavedModel` protocol buffer to disk.

    The function writes the SavedModel protocol buffer to the export directory
    in serialized format.

    Args:
      as_text: Writes the SavedModel protocol buffer in text format to disk.

    Returns:
      The path to which the SavedModel protocol buffer was written.
    """
    if not file_io.file_exists(self._export_dir):
      file_io.recursive_create_dir(self._export_dir)

    if as_text:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
      file_io.write_string_to_file(path, str(self._saved_model))
    else:
      path = os.path.join(
          compat.as_bytes(self._export_dir),
          compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
      file_io.write_string_to_file(path, self._saved_model.SerializeToString())
    tf_logging.info("SavedModel written to: %s", path)

    return path