Python tensorflow.python.lib.io.file_io.delete_recursively() Examples
The following are 8
code examples of tensorflow.python.lib.io.file_io.delete_recursively().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module tensorflow.python.lib.io.file_io, or try the search function.
Example #1
Source File: googletest.py From lambda-packs with MIT License | 6 votes |
def GetTempDir():
  """Return a process-wide temporary directory for tests to use.

  The directory is created lazily on first call, cached in the module-level
  global `_googletest_temp_dir`, and scheduled for recursive deletion at
  interpreter exit via `atexit`.

  Returns:
    Path to the shared temporary directory (str).
  """
  global _googletest_temp_dir
  if not _googletest_temp_dir:
    # Derive the prefix from the file of the outermost stack frame, i.e. the
    # test script that started the process.
    first_frame = tf_inspect.stack()[-1][0]
    temp_dir = os.path.join(tempfile.gettempdir(),
                            os.path.basename(tf_inspect.getfile(first_frame)))
    # BUG FIX: the original used temp_dir.rstrip('.py'), but str.rstrip strips
    # any trailing run of the characters '.', 'p', 'y' — e.g. 'happy.py' would
    # become 'ha'. os.path.splitext removes exactly the extension.
    prefix, _ = os.path.splitext(temp_dir)
    temp_dir = tempfile.mkdtemp(prefix=prefix)

    def delete_temp_dir(dirname=temp_dir):
      # Best-effort cleanup at exit: log, but do not raise, on filesystem
      # errors so interpreter shutdown is not disturbed.
      try:
        file_io.delete_recursively(dirname)
      except errors.OpError as e:
        logging.error('Error removing %s: %s', dirname, e)

    atexit.register(delete_temp_dir)
    _googletest_temp_dir = temp_dir
  return _googletest_temp_dir
Example #2
Source File: googletest.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 6 votes |
def GetTempDir():
  """Return a process-wide temporary directory for tests to use.

  Created on first call and cached in the module global
  `_googletest_temp_dir`; registered with `atexit` for recursive removal
  when the process exits.

  Returns:
    Path to the shared temporary directory (str).
  """
  global _googletest_temp_dir
  if not _googletest_temp_dir:
    # Name the directory after the outermost caller's source file.
    first_frame = tf_inspect.stack()[-1][0]
    temp_dir = os.path.join(tempfile.gettempdir(),
                            os.path.basename(tf_inspect.getfile(first_frame)))
    # BUG FIX: rstrip('.py') strips trailing '.', 'p', 'y' *characters*
    # (so 'happy.py' -> 'ha'), not the '.py' suffix. splitext drops only
    # the extension.
    prefix, _ = os.path.splitext(temp_dir)
    temp_dir = tempfile.mkdtemp(prefix=prefix)

    def delete_temp_dir(dirname=temp_dir):
      # Swallow filesystem errors during interpreter shutdown; just log them.
      try:
        file_io.delete_recursively(dirname)
      except errors.OpError as e:
        logging.error('Error removing %s: %s', dirname, e)

    atexit.register(delete_temp_dir)
    _googletest_temp_dir = temp_dir
  return _googletest_temp_dir
Example #3
Source File: saved_model_test.py From auto-alt-text-lambda-api with MIT License | 5 votes |
def tearDownModule():
  """Module-level teardown: recursively remove the shared test temp dir."""
  tmp_dir = test.get_temp_dir()
  file_io.delete_recursively(tmp_dir)
Example #4
Source File: _ml.py From pydatalab with Apache License 2.0 | 5 votes |
def _analyze(args, cell):
  """Run the analyze.py step as a subprocess over the configured dataset.

  Builds a command line for analyze.py from `args` (presumably the parsed
  magic-command arguments dict — keys 'output', 'cloud', 'data', 'features',
  'package' are read here; TODO confirm against the caller), materializes the
  schema/features JSON files into a temp dir, runs the subprocess, and always
  deletes the temp dir afterwards. `cell` is accepted but unused in this body.
  """
  # For now, always run python2. If needed we can run python3 when the current kernel
  # is py3. Since now our transform cannot work on py3 anyway, I would rather run
  # everything with python2.
  cmd_args = ['python', 'analyze.py', '--output', _abs_path(args['output'])]
  if args['cloud']:
    cmd_args.append('--cloud')

  training_data = get_dataset_from_arg(args['data'])
  # In cloud mode the scratch dir lives under the (remote) output path so the
  # generated JSON files are reachable by the cloud job; locally use mkdtemp.
  if args['cloud']:
    tmpdir = os.path.join(args['output'], 'tmp')
  else:
    tmpdir = tempfile.mkdtemp()

  try:
    # Only CSV and BigQuery training sources are supported.
    if isinstance(training_data.train, datalab_ml.CsvDataSet):
      csv_data = training_data.train
      schema_file = _create_json_file(tmpdir, csv_data.schema, 'schema.json')
      for file_name in csv_data.input_files:
        cmd_args.append('--csv=' + _abs_path(file_name))

      cmd_args.extend(['--schema', schema_file])
    elif isinstance(training_data.train, datalab_ml.BigQueryDataSet):
      bq_data = training_data.train
      cmd_args.extend(['--bigquery', bq_data.table])
    else:
      raise ValueError('Unexpected training data type. Only csv or bigquery are supported.')

    features = args['features']
    features_file = _create_json_file(tmpdir, features, 'features.json')
    cmd_args.extend(['--features', features_file])

    # Run analyze.py from either the user-supplied package or the default one.
    if args['package']:
      code_path = os.path.join(tmpdir, 'package')
      _archive.extract_archive(args['package'], code_path)
    else:
      code_path = DEFAULT_PACKAGE_PATH

    _shell_process.run_and_monitor(cmd_args, os.getpid(), cwd=code_path)
  finally:
    # Always clean up the scratch dir, even if the subprocess fails.
    file_io.delete_recursively(tmpdir)
Example #5
Source File: saved_model_test.py From deep_image_model with Apache License 2.0 | 5 votes |
def tearDownModule():
  """Delete this module's shared temporary directory once all tests finish."""
  temp_path = tf.test.get_temp_dir()
  file_io.delete_recursively(temp_path)
Example #6
Source File: gcs_smoke.py From deep_image_model with Apache License 2.0 | 5 votes |
def create_dir_test():
  """Smoke-tests file_io directory handling (create, list, delete) on GCS."""
  # Millisecond wall-clock helper, used both for timing and for a unique name.
  now_ms = lambda: int(round(time.time() * 1000))

  start = now_ms()
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, start)
  print("Creating dir %s" % dir_name)
  file_io.create_dir(dir_name)
  print("Created directory in: %d milliseconds" % (now_ms() - start))

  # Check that the directory exists.
  dir_exists = file_io.is_directory(dir_name)
  print("%s directory exists: %s" % (dir_name, dir_exists))

  # List contents of the just-created directory.
  print("Listing directory %s." % dir_name)
  start = now_ms()
  print(file_io.list_directory(dir_name))
  print("Listed directory %s in %s milliseconds" % (dir_name, now_ms() - start))

  # Delete the directory.
  print("Deleting directory %s." % dir_name)
  start = now_ms()
  file_io.delete_recursively(dir_name)
  print("Deleted directory %s in %s milliseconds" % (dir_name, now_ms() - start))
Example #7
Source File: gcs_smoke.py From deep_image_model with Apache License 2.0 | 5 votes |
def create_object_test():
  """Smoke-tests file_io object handling (write, match, delete) on GCS."""
  # Millisecond timestamp doubles as a unique directory suffix.
  timestamp = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, timestamp)
  print("Creating dir %s." % dir_name)
  file_io.create_dir(dir_name)

  # Create a file in this directory.
  file_name = "%s/test_file.txt" % dir_name
  print("Creating file %s." % file_name)
  file_io.write_string_to_file(file_name, "test file creation.")

  # The glob should match exactly the one file just written.
  list_files_pattern = "%s/test_file*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  files_list = file_io.get_matching_files(list_files_pattern)
  print(files_list)
  assert len(files_list) == 1
  assert files_list[0] == file_name

  # Cleanup test files.
  print("Deleting file %s." % file_name)
  file_io.delete_file(file_name)

  # Delete directory.
  print("Deleting directory %s." % dir_name)
  file_io.delete_recursively(dir_name)
Example #8
Source File: saved_model_test.py From keras-lambda with MIT License | 5 votes |
def tearDownModule():
  """After every test in this module has run, wipe the shared temp directory."""
  file_io.delete_recursively(test.get_temp_dir())