Python tensorflow.python.lib.io.file_io.create_dir() Examples
The following are 4 code examples of tensorflow.python.lib.io.file_io.create_dir().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module tensorflow.python.lib.io.file_io, or try the search function.
Example #1
Source File: gcs_smoke.py From deep_image_model with Apache License 2.0 | 5 votes |
def create_dir_test():
  """Exercises file_io's directory helpers (create/list/delete) on GCS.

  Creates a uniquely-named directory under FLAGS.gcs_bucket_url, verifies it
  exists, lists it, then deletes it recursively, printing wall-clock timings
  for each operation.
  """
  # Millisecond wall clock, hoisted so each phase is timed the same way.
  now_ms = lambda: int(round(time.time() * 1000))

  t0 = now_ms()
  # The start timestamp doubles as a uniqueness suffix for the test dir.
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, t0)
  print("Creating dir %s" % dir_name)
  file_io.create_dir(dir_name)
  print("Created directory in: %d milliseconds" % (now_ms() - t0))

  # Check that the directory exists.
  dir_exists = file_io.is_directory(dir_name)
  print("%s directory exists: %s" % (dir_name, dir_exists))

  # List contents of just created directory.
  print("Listing directory %s." % dir_name)
  t0 = now_ms()
  print(file_io.list_directory(dir_name))
  print("Listed directory %s in %s milliseconds" % (dir_name, now_ms() - t0))

  # Delete directory.
  print("Deleting directory %s." % dir_name)
  t0 = now_ms()
  file_io.delete_recursively(dir_name)
  print("Deleted directory %s in %s milliseconds" % (dir_name, now_ms() - t0))
Example #2
Source File: gcs_smoke.py From deep_image_model with Apache License 2.0 | 5 votes |
def create_object_test():
  """Exercises file_io's object (file) manipulation helpers on GCS.

  Creates a directory and a file inside it, confirms the file is found by a
  glob pattern, then cleans up both the file and the directory.
  """
  t0 = int(round(time.time() * 1000))
  # The start timestamp doubles as a uniqueness suffix for the test dir.
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, t0)
  print("Creating dir %s." % dir_name)
  file_io.create_dir(dir_name)

  # Create a file in this directory.
  file_name = "%s/test_file.txt" % dir_name
  print("Creating file %s." % file_name)
  file_io.write_string_to_file(file_name, "test file creation.")

  # The glob must match exactly the one file written above.
  list_files_pattern = "%s/test_file*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  files_list = file_io.get_matching_files(list_files_pattern)
  print(files_list)
  assert len(files_list) == 1
  assert files_list[0] == file_name

  # Cleanup test files.
  print("Deleting file %s." % file_name)
  file_io.delete_file(file_name)
  # Delete directory.
  print("Deleting directory %s." % dir_name)
  file_io.delete_recursively(dir_name)
Example #3
Source File: test_transform.py From pydatalab with Apache License 2.0 | 4 votes |
def setUpClass(cls):
  """Builds a temp workspace and runs analyze.py over a 3-row image CSV.

  Side effects on the class: working_dir, source_dir, analysis_dir,
  output_dir, and csv_input_filepath are set; analysis results are written
  to analysis_dir by the analyze.py subprocess.
  """
  # Set up dirs.
  cls.working_dir = tempfile.mkdtemp()
  cls.source_dir = os.path.join(cls.working_dir, 'source')
  cls.analysis_dir = os.path.join(cls.working_dir, 'analysis')
  cls.output_dir = os.path.join(cls.working_dir, 'output')
  file_io.create_dir(cls.source_dir)

  # Make test image files (distinct sizes/colors so rows are distinguishable).
  img1_file = os.path.join(cls.source_dir, 'img1.jpg')
  Image.new('RGB', size=(300, 300), color=(155, 0, 0)).save(img1_file)
  img2_file = os.path.join(cls.source_dir, 'img2.jpg')
  Image.new('RGB', size=(50, 50), color=(125, 240, 0)).save(img2_file)
  img3_file = os.path.join(cls.source_dir, 'img3.jpg')
  Image.new('RGB', size=(800, 600), color=(33, 55, 77)).save(img3_file)

  # Download inception checkpoint. Note that gs url doesn't work because
  # we may not have gcloud signed in when running the test.
  url = ('https://storage.googleapis.com/cloud-ml-data/img/'
         'flower_photos/inception_v3_2016_08_28.ckpt')
  checkpoint_path = os.path.join(cls.working_dir, "checkpoint")
  with open(checkpoint_path, 'wb') as f:
    f.write(urlopen(url).read())

  # Make csv input file
  cls.csv_input_filepath = os.path.join(cls.source_dir, 'input.csv')
  file_io.write_string_to_file(
      cls.csv_input_filepath,
      '1,Monday,23.0,red blue,%s\n' % img1_file +
      '0,Friday,18.0,green,%s\n' % img2_file +
      '0,Sunday,12.0,green red blue green,%s\n' % img3_file)

  # Call analyze.py to create analysis results.
  schema = [{'name': 'target_col', 'type': 'FLOAT'},
            {'name': 'cat_col', 'type': 'STRING'},
            {'name': 'num_col', 'type': 'FLOAT'},
            {'name': 'text_col', 'type': 'STRING'},
            {'name': 'img_col', 'type': 'STRING'}]
  schema_file = os.path.join(cls.source_dir, 'schema.json')
  file_io.write_string_to_file(schema_file, json.dumps(schema))
  features = {'target_col': {'transform': 'target'},
              'cat_col': {'transform': 'one_hot'},
              'num_col': {'transform': 'identity'},
              'text_col': {'transform': 'multi_hot'},
              'img_col': {'transform': 'image_to_vec',
                          'checkpoint': checkpoint_path}}
  features_file = os.path.join(cls.source_dir, 'features.json')
  file_io.write_string_to_file(features_file, json.dumps(features))
  # NOTE(review): shell=True with a joined string — acceptable here only
  # because every path component is test-generated, not user input.
  cmd = ['python ' + os.path.join(CODE_PATH, 'analyze.py'),
         '--output=' + cls.analysis_dir,
         '--csv=' + cls.csv_input_filepath,
         '--schema=' + schema_file,
         '--features=' + features_file]
  subprocess.check_call(' '.join(cmd), shell=True)
Example #4
Source File: test_transform.py From pydatalab with Apache License 2.0 | 4 votes |
def setUpClass(cls):
  """Builds a temp workspace and runs analyze.py over a keyed 3-row CSV.

  Same as the non-keyed variant but the schema leads with an INTEGER
  key_col. Side effects on the class: working_dir, source_dir,
  analysis_dir, output_dir, and csv_input_filepath are set; analysis
  results are written to analysis_dir by the analyze.py subprocess.
  """
  # Set up dirs.
  cls.working_dir = tempfile.mkdtemp()
  cls.source_dir = os.path.join(cls.working_dir, 'source')
  cls.analysis_dir = os.path.join(cls.working_dir, 'analysis')
  cls.output_dir = os.path.join(cls.working_dir, 'output')
  file_io.create_dir(cls.source_dir)

  # Make test image files (distinct sizes/colors so rows are distinguishable).
  img1_file = os.path.join(cls.source_dir, 'img1.jpg')
  Image.new('RGB', size=(300, 300), color=(155, 0, 0)).save(img1_file)
  img2_file = os.path.join(cls.source_dir, 'img2.jpg')
  Image.new('RGB', size=(50, 50), color=(125, 240, 0)).save(img2_file)
  img3_file = os.path.join(cls.source_dir, 'img3.jpg')
  Image.new('RGB', size=(800, 600), color=(33, 55, 77)).save(img3_file)

  # Download inception checkpoint. Note that gs url doesn't work because
  # we may not have gcloud signed in when running the test.
  url = ('https://storage.googleapis.com/cloud-ml-data/img/'
         'flower_photos/inception_v3_2016_08_28.ckpt')
  checkpoint_path = os.path.join(cls.working_dir, "checkpoint")
  with open(checkpoint_path, 'wb') as f:
    f.write(urlopen(url).read())

  # Make csv input file
  cls.csv_input_filepath = os.path.join(cls.source_dir, 'input.csv')
  file_io.write_string_to_file(
      cls.csv_input_filepath,
      '1,1,Monday,23.0,%s\n' % img1_file +
      '2,0,Friday,18.0,%s\n' % img2_file +
      '3,0,Sunday,12.0,%s\n' % img3_file)

  # Call analyze.py to create analysis results.
  schema = [{'name': 'key_col', 'type': 'INTEGER'},
            {'name': 'target_col', 'type': 'FLOAT'},
            {'name': 'cat_col', 'type': 'STRING'},
            {'name': 'num_col', 'type': 'FLOAT'},
            {'name': 'img_col', 'type': 'STRING'}]
  schema_file = os.path.join(cls.source_dir, 'schema.json')
  file_io.write_string_to_file(schema_file, json.dumps(schema))
  features = {'key_col': {'transform': 'key'},
              'target_col': {'transform': 'target'},
              'cat_col': {'transform': 'one_hot'},
              'num_col': {'transform': 'identity'},
              'img_col': {'transform': 'image_to_vec',
                          'checkpoint': checkpoint_path}}
  features_file = os.path.join(cls.source_dir, 'features.json')
  file_io.write_string_to_file(features_file, json.dumps(features))
  # NOTE(review): shell=True with a joined string — acceptable here only
  # because every path component is test-generated, not user input.
  cmd = ['python ' + os.path.join(CODE_PATH, 'analyze.py'),
         '--output=' + cls.analysis_dir,
         '--csv=' + cls.csv_input_filepath,
         '--schema=' + schema_file,
         '--features=' + features_file]
  subprocess.check_call(' '.join(cmd), shell=True)