Python tensorflow.gfile Examples

The following are 30 code examples of the tensorflow.gfile module (it is a module, not a callable). Each example notes its source file, the project it was taken from, and that project's license, so you can locate the original code in context.
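Before diving into the examples, a quick orientation may help: tf.gfile is TensorFlow's file-I/O layer, mirroring Python's built-in file API while also supporting remote filesystems such as Google Cloud Storage (gs://...) and HDFS. Below is a minimal sketch of the most common calls, written against the TensorFlow 1.x API these examples use (in TensorFlow 2.x the equivalents live under tf.io.gfile); the paths are hypothetical.

import tensorflow as tf

demo_dir = '/tmp/gfile_demo'        # hypothetical directory
path = demo_dir + '/hello.txt'

# Directory helpers.
if not tf.gfile.Exists(demo_dir):
  tf.gfile.MakeDirs(demo_dir)

# GFile behaves like a regular Python file object (tf.gfile.Open is an alias).
with tf.gfile.GFile(path, 'w') as f:
  f.write('hello\n')
with tf.gfile.GFile(path, 'r') as f:
  print(f.read())

# Listing and globbing.
print(tf.gfile.ListDirectory(demo_dir))
print(tf.gfile.Glob(demo_dir + '/*.txt'))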
Example #1
Source File: datasets.py From object_detection_with_tensorflow with MIT License
def read_MNIST(binarize=False):
  """Reads in MNIST images.

  Args:
    binarize: whether to use the fixed binarization

  Returns:
    x_train: 50k training images
    x_valid: 10k validation images
    x_test: 10k test images
  """
  # Pickled and NumPy data must be read in binary mode ('rb', not 'r').
  with gfile.FastGFile(
      os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'rb') as f:
    (x_train, _), (x_valid, _), (x_test, _) = pickle.load(f)

  if not binarize:
    with gfile.FastGFile(
        os.path.join(config.DATA_DIR, config.MNIST_FLOAT), 'rb') as f:
      x_train = np.load(f).reshape(-1, 784)

  return x_train, x_valid, x_test
Example #2
Source File: logging.py From parasol with MIT License
# Used as a context manager; in the source this generator is presumably
# wrapped with @contextlib.contextmanager.
def tee_out(out_dir):
  out_dir = Path(out_dir)
  stdout = tempfile.NamedTemporaryFile(delete=False)
  old_stdout = sys.stdout
  old_stderr = sys.stderr
  stderr = tempfile.NamedTemporaryFile(delete=False)
  try:
    with StdoutTee(stdout.name, buff=1), StderrTee(stderr.name, buff=1):
      yield
  finally:
    # Restore the original streams, then copy the captured logs out.
    sys.stdout = old_stdout
    sys.stderr = old_stderr
    with gfile.GFile(out_dir / 'stdout.log', 'w') as fp:
      with gfile.GFile(stdout.name, 'r') as out:
        fp.write(out.read())
    with gfile.GFile(out_dir / 'stderr.log', 'w') as fp:
      with gfile.GFile(stderr.name, 'r') as err:
        fp.write(err.read())
    os.remove(stdout.name)
    os.remove(stderr.name)
Example #3
Source File: gen_data.py From multilabel-image-classification-tensorflow with MIT License
def _gen_example(i, all_examples):
  """Saves one example to file. Also adds it to all_examples dict."""
  example = dataloader.get_example_with_index(i)
  if not example:
    return
  image_seq_stack = _stack_image_seq(example['image_seq'])
  example.pop('image_seq', None)  # Free up memory.
  intrinsics = example['intrinsics']
  fx = intrinsics[0, 0]
  fy = intrinsics[1, 1]
  cx = intrinsics[0, 2]
  cy = intrinsics[1, 2]
  save_dir = os.path.join(FLAGS.data_dir, example['folder_name'])
  if not gfile.Exists(save_dir):
    gfile.MakeDirs(save_dir)
  img_filepath = os.path.join(save_dir, '%s.jpg' % example['file_name'])
  scipy.misc.imsave(img_filepath, image_seq_stack.astype(np.uint8))
  cam_filepath = os.path.join(save_dir, '%s_cam.txt' % example['file_name'])
  example['cam'] = '%f,0.,%f,0.,%f,%f,0.,0.,1.' % (fx, cx, fy, cy)
  with open(cam_filepath, 'w') as cam_f:
    cam_f.write(example['cam'])
  key = example['folder_name'] + '_' + example['file_name']
  all_examples[key] = example
Example #4
Source File: gen_data.py From models with Apache License 2.0
def _gen_example(i, all_examples):
  """Saves one example to file. Also adds it to all_examples dict."""
  example = dataloader.get_example_with_index(i)
  if not example:
    return
  image_seq_stack = _stack_image_seq(example['image_seq'])
  example.pop('image_seq', None)  # Free up memory.
  intrinsics = example['intrinsics']
  fx = intrinsics[0, 0]
  fy = intrinsics[1, 1]
  cx = intrinsics[0, 2]
  cy = intrinsics[1, 2]
  save_dir = os.path.join(FLAGS.data_dir, example['folder_name'])
  if not gfile.Exists(save_dir):
    gfile.MakeDirs(save_dir)
  img_filepath = os.path.join(save_dir, '%s.jpg' % example['file_name'])
  scipy.misc.imsave(img_filepath, image_seq_stack.astype(np.uint8))
  cam_filepath = os.path.join(save_dir, '%s_cam.txt' % example['file_name'])
  example['cam'] = '%f,0.,%f,0.,%f,%f,0.,0.,1.' % (fx, cx, fy, cy)
  with open(cam_filepath, 'w') as cam_f:
    cam_f.write(example['cam'])
  key = example['folder_name'] + '_' + example['file_name']
  all_examples[key] = example
Example #5
Source File: plot_results.py From parasol with MIT License
def find_files(path, name):
  if gfile.IsDirectory(path):
    files = gfile.ListDirectory(path)
    # If the directory contains `name` directly, yield it and stop;
    # otherwise recurse into the subdirectories.
    for p in files:
      if p == name:
        yield path / p
        return
    for p in files:
      yield from find_files(path / p, name)
  else:
    # Escape literal '[' and ']' so fnmatch does not treat them as
    # character classes (the asdf/fdsa placeholders avoid re-escaping).
    pattern = (path.replace('[', 'asdf').replace(']', 'fdsa')
                   .replace('asdf', '[[]').replace('fdsa', '[]]'))
    for p in gfile.ListDirectory(path.parent):
      if not fnmatch.fnmatch(path.parent / p, pattern):
        continue
      p = Path(path.parent / p)
      if p == path:
        continue
      yield from find_files(p, name)
Example #6
Source File: data_preprocessing.py From multilabel-image-classification-tensorflow with MIT License
def write_flagfile(flags_, ncf_dataset):
  """Write flagfile to begin async data generation."""
  if ncf_dataset.deterministic:
    flags_["seed"] = stat_utils.random_int32()

  # We write to a temp file then atomically rename it to the final file,
  # because writing directly to the final file can cause the data generation
  # async process to read a partially written JSON file.
  flagfile_temp = os.path.join(ncf_dataset.cache_paths.cache_root,
                               rconst.FLAGFILE_TEMP)
  tf.logging.info("Preparing flagfile for async data generation in {} ..."
                  .format(flagfile_temp))
  with tf.gfile.Open(flagfile_temp, "w") as f:
    for k, v in six.iteritems(flags_):
      f.write("--{}={}\n".format(k, v))
  flagfile = os.path.join(ncf_dataset.cache_paths.cache_root, rconst.FLAGFILE)
  tf.gfile.Rename(flagfile_temp, flagfile)
  tf.logging.info(
      "Wrote flagfile for async data generation in {}.".format(flagfile))
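The temp-file-plus-rename idiom above is the key detail: because tf.gfile.Rename replaces the destination in one step, a consumer polling for the final flagfile never observes a half-written file. As a hedged sketch (this reader is not part of the original project), the consuming process might parse the file back like this:

def read_flagfile(path):
  # Hypothetical counterpart to write_flagfile: parses "--key=value" lines.
  flags = {}
  with tf.gfile.Open(path, "r") as f:
    for line in f:
      line = line.strip()
      if line.startswith("--") and "=" in line:
        key, value = line[2:].split("=", 1)
        flags[key] = value
  return flags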
Example #7
Source File: datasets.py From yolo_v2 with Apache License 2.0
def read_MNIST(binarize=False):
  """Reads in MNIST images.

  Args:
    binarize: whether to use the fixed binarization

  Returns:
    x_train: 50k training images
    x_valid: 10k validation images
    x_test: 10k test images
  """
  # Pickled and NumPy data must be read in binary mode ('rb', not 'r').
  with gfile.FastGFile(
      os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'rb') as f:
    (x_train, _), (x_valid, _), (x_test, _) = pickle.load(f)

  if not binarize:
    with gfile.FastGFile(
        os.path.join(config.DATA_DIR, config.MNIST_FLOAT), 'rb') as f:
      x_train = np.load(f).reshape(-1, 784)

  return x_train, x_valid, x_test
Example #8
Source File: kpn_data_provider.py From burst-denoising with Apache License 2.0
def load_batch_demosaic(BURST_LENGTH, dataset_dir, batch_size=32, height=64,
                        width=64, degamma=1., to_shift=1., upscale=1, jitter=1):
  filenames = [os.path.join(dataset_dir, f)
               for f in gfile.ListDirectory(dataset_dir)]
  filename_queue = tf.train.string_input_producer(filenames)

  mosaic = None
  while mosaic is None:  # 'is None', not '== None'
    _, image_file = tf.WholeFileReader().read(filename_queue)
    image = tf.image.decode_image(image_file)
    mosaic, demosaic, shift = make_stack_demosaic(
        (tf.cast(image[0], tf.float32) / 255.) ** degamma,
        height, width, 128, BURST_LENGTH, to_shift, upscale, jitter)

  # Batch it up.
  mosaic, demosaic, shift = tf.train.shuffle_batch(
      [mosaic, demosaic, shift],
      batch_size=batch_size,
      num_threads=2,
      capacity=500 + 3 * batch_size,
      enqueue_many=True,
      min_after_dequeue=100)

  return mosaic, demosaic, shift
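Note that load_batch_demosaic builds a TensorFlow 1.x queue-based input pipeline: the returned tensors only yield data once queue runners are started inside a session. A minimal usage sketch, assuming the function is importable and the (hypothetical) directory contains images:

mosaic, demosaic, shift = load_batch_demosaic(
    BURST_LENGTH=8, dataset_dir='/tmp/burst_images')

with tf.Session() as sess:
  coord = tf.train.Coordinator()
  threads = tf.train.start_queue_runners(sess=sess, coord=coord)
  m, d, s = sess.run([mosaic, demosaic, shift])  # pulls one batch
  coord.request_stop()
  coord.join(threads)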
Example #9
Source File: kpn_data_provider.py From burst-denoising with Apache License 2.0
def load_batch_noised(depth, dataset_dir, batch_size=32, height=64, width=64,
                      degamma=1., sig_range=20.):
  filenames = [os.path.join(dataset_dir, f)
               for f in gfile.ListDirectory(dataset_dir)]
  filename_queue = tf.train.string_input_producer(filenames)

  noised_stack = None
  while noised_stack is None:  # 'is None', not '== None'
    _, image_file = tf.WholeFileReader().read(filename_queue)
    image = tf.image.decode_image(image_file)
    noised_stack, denoised_stack, sig_stack = make_stack_noised(
        (tf.cast(image[0], tf.float32) / 255.) ** degamma,
        height, width, depth, sig_range)

  # Batch it up.
  noised, denoised, sig = tf.train.shuffle_batch(
      [noised_stack, denoised_stack, sig_stack],
      batch_size=batch_size,
      num_threads=2,
      capacity=1024 + 3 * batch_size,
      enqueue_many=True,
      min_after_dequeue=500)

  return noised, denoised, sig
Example #10
Source File: test_util.py From in-silico-labeling with Apache License 2.0
def assert_images_near(self, directory: str, only_check_size: bool = False):
  """Assert images in the golden directory match those in the test."""
  # We assume all images are pngs.
  glob = os.path.join(os.environ['TEST_SRCDIR'], 'isl/testdata', directory,
                      '*.png')
  golden_image_paths = gfile.Glob(glob)
  assert golden_image_paths, glob

  logging.info('Golden images for test match are: %s', golden_image_paths)

  for gip in golden_image_paths:
    test_image_path = os.path.join(os.environ['TEST_TMPDIR'], directory,
                                   os.path.basename(gip))
    assert gfile.Exists(test_image_path), (
        "Test image doesn't exist: %s" % test_image_path)

    golden = util.read_image(gip)
    test = util.read_image(test_image_path)
    if only_check_size:
      assert golden.shape == test.shape, (golden.shape, test.shape)
    else:
      np.testing.assert_allclose(golden, test, rtol=0.0001, atol=0.0001)
Example #11
Source File: gen_data.py From g-tensorflow-models with Apache License 2.0
def _gen_example(i, all_examples):
  """Saves one example to file. Also adds it to all_examples dict."""
  example = dataloader.get_example_with_index(i)
  if not example:
    return
  image_seq_stack = _stack_image_seq(example['image_seq'])
  example.pop('image_seq', None)  # Free up memory.
  intrinsics = example['intrinsics']
  fx = intrinsics[0, 0]
  fy = intrinsics[1, 1]
  cx = intrinsics[0, 2]
  cy = intrinsics[1, 2]
  save_dir = os.path.join(FLAGS.data_dir, example['folder_name'])
  if not gfile.Exists(save_dir):
    gfile.MakeDirs(save_dir)
  img_filepath = os.path.join(save_dir, '%s.jpg' % example['file_name'])
  scipy.misc.imsave(img_filepath, image_seq_stack.astype(np.uint8))
  cam_filepath = os.path.join(save_dir, '%s_cam.txt' % example['file_name'])
  example['cam'] = '%f,0.,%f,0.,%f,%f,0.,0.,1.' % (fx, cx, fy, cy)
  with open(cam_filepath, 'w') as cam_f:
    cam_f.write(example['cam'])
  key = example['folder_name'] + '_' + example['file_name']
  all_examples[key] = example
Example #12
Source File: launch.py From in-silico-labeling with Apache License 2.0
def infer_single_image(gitapp: controller.GetInputTargetAndPredictedParameters):
  """Predicts the labels for a single image."""
  if not gfile.Exists(output_directory()):
    gfile.MakeDirs(output_directory())

  if FLAGS.infer_channel_whitelist is not None:
    infer_channel_whitelist = FLAGS.infer_channel_whitelist.split(',')
  else:
    infer_channel_whitelist = None

  while True:
    infer.infer(
        gitapp=gitapp,
        restore_directory=FLAGS.restore_directory or train_directory(),
        output_directory=output_directory(),
        extract_patch_size=CONCORDANCE_EXTRACT_PATCH_SIZE,
        stitch_stride=CONCORDANCE_STITCH_STRIDE,
        infer_size=FLAGS.infer_size,
        channel_whitelist=infer_channel_whitelist,
        simplify_error_panels=FLAGS.infer_simplify_error_panels,
    )
    if not FLAGS.infer_continuously:
      break
Example #13
Source File: datasets.py From hands-detection with MIT License
def read_MNIST(binarize=False):
  """Reads in MNIST images.

  Args:
    binarize: whether to use the fixed binarization

  Returns:
    x_train: 50k training images
    x_valid: 10k validation images
    x_test: 10k test images
  """
  # Pickled and NumPy data must be read in binary mode ('rb', not 'r').
  with gfile.FastGFile(
      os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'rb') as f:
    (x_train, _), (x_valid, _), (x_test, _) = pickle.load(f)

  if not binarize:
    with gfile.FastGFile(
        os.path.join(config.DATA_DIR, config.MNIST_FLOAT), 'rb') as f:
      x_train = np.load(f).reshape(-1, 784)

  return x_train, x_valid, x_test
Example #14
Source File: kpn_data_provider.py From burst-denoising with Apache License 2.0
def load_batch_hqjitter(dataset_dir, patches_per_img=32, min_queue=8,
                        BURST_LENGTH=1, batch_size=32, repeats=1,
                        height=64, width=64, degamma=1.,
                        to_shift=1., upscale=1, jitter=1, smalljitter=1):
  filenames = [os.path.join(dataset_dir, f)
               for f in gfile.ListDirectory(dataset_dir)]
  filename_queue = tf.train.string_input_producer(filenames)

  _, image_file = tf.WholeFileReader().read(filename_queue)
  image = tf.image.decode_image(image_file)
  patches = make_stack_hqjitter(
      (tf.cast(image[0], tf.float32) / 255.) ** degamma,
      height, width, patches_per_img, BURST_LENGTH, to_shift, upscale, jitter)
  unique = batch_size // repeats

  # Batch it up.
  patches = tf.train.shuffle_batch(
      [patches],
      batch_size=unique,
      num_threads=2,
      capacity=min_queue + 3 * batch_size,
      enqueue_many=True,
      min_after_dequeue=min_queue)

  print('PATCHES =================', patches.get_shape().as_list())

  patches = make_batch_hqjitter(patches, BURST_LENGTH, batch_size, repeats,
                                height, width, to_shift, upscale, jitter,
                                smalljitter)
  return patches
Example #15
Source File: datasets.py From object_detection_kitti with Apache License 2.0
def read_MNIST(binarize=False):
  """Reads in MNIST images.

  Args:
    binarize: whether to use the fixed binarization

  Returns:
    x_train: 50k training images
    x_valid: 10k validation images
    x_test: 10k test images
  """
  # Pickled and NumPy data must be read in binary mode ('rb', not 'r').
  with gfile.FastGFile(
      os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'rb') as f:
    (x_train, _), (x_valid, _), (x_test, _) = pickle.load(f)

  if not binarize:
    with gfile.FastGFile(
        os.path.join(config.DATA_DIR, config.MNIST_FLOAT), 'rb') as f:
      x_train = np.load(f).reshape(-1, 784)

  return x_train, x_valid, x_test
Example #16
Source File: datasets.py From Gun-Detector with Apache License 2.0
def read_MNIST(binarize=False):
  """Reads in MNIST images.

  Args:
    binarize: whether to use the fixed binarization

  Returns:
    x_train: 50k training images
    x_valid: 10k validation images
    x_test: 10k test images
  """
  # Pickled and NumPy data must be read in binary mode ('rb', not 'r').
  with gfile.FastGFile(
      os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'rb') as f:
    (x_train, _), (x_valid, _), (x_test, _) = pickle.load(f)

  if not binarize:
    with gfile.FastGFile(
        os.path.join(config.DATA_DIR, config.MNIST_FLOAT), 'rb') as f:
      x_train = np.load(f).reshape(-1, 784)

  return x_train, x_valid, x_test
Example #17
Source File: util.py From models with Apache License 2.0
def read_text_lines(filepath):
  with tf.gfile.Open(filepath, 'r') as f:
    lines = f.readlines()
  lines = [l.rstrip() for l in lines]
  return lines
Example #18
Source File: reader.py From g-tensorflow-models with Apache License 2.0
def compile_file_list(self, data_dir, split, load_pose=False):
  """Creates a list of input files."""
  logging.info('data_dir: %s', data_dir)
  with gfile.Open(os.path.join(data_dir, '%s.txt' % split), 'r') as f:
    frames = f.readlines()
    subfolders = [x.split(' ')[0] for x in frames]
    frame_ids = [x.split(' ')[1][:-1] for x in frames]
  image_file_list = [
      os.path.join(data_dir, subfolders[i], frame_ids[i] + '.jpg')
      for i in range(len(frames))
  ]
  cam_file_list = [
      os.path.join(data_dir, subfolders[i], frame_ids[i] + '_cam.txt')
      for i in range(len(frames))
  ]
  file_lists = {}
  file_lists['image_file_list'] = image_file_list
  file_lists['cam_file_list'] = cam_file_list
  if load_pose:
    pose_file_list = [
        os.path.join(data_dir, subfolders[i], frame_ids[i] + '_pose.txt')
        for i in range(len(frames))
    ]
    file_lists['pose_file_list'] = pose_file_list
  self.steps_per_epoch = len(image_file_list) // self.batch_size
  return file_lists
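From the parsing above, each line of the '%s.txt' split file is evidently a subfolder name and a frame id separated by a single space (the [:-1] slice strips the trailing newline). For reference, a hedged sketch of writing such a file; the folder and frame names here are hypothetical:

entries = [('2011_09_26_drive_0001_sync', '0000000001'),
           ('2011_09_26_drive_0001_sync', '0000000002')]
with tf.gfile.Open('/tmp/data/train.txt', 'w') as f:
  for subfolder, frame_id in entries:
    f.write('%s %s\n' % (subfolder, frame_id))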
Example #19
Source File: file.py From mac-graph with The Unlicense
def __enter__(self):
  try:
    os.makedirs(self.file_dir, exist_ok=True)
  except Exception:
    pass
  self.file = tf.gfile.GFile(self.file_path, self.open_str)
  return self.file
Example #20
Source File: util.py From multilabel-image-classification-tensorflow with MIT License
def load_image(img_file, resize=None, interpolation='linear'):
  """Load image from disk. Output value range: [0,1]."""
  # Read the raw bytes in binary mode; np.frombuffer replaces the
  # deprecated np.fromstring.
  im_data = np.frombuffer(gfile.Open(img_file, 'rb').read(), np.uint8)
  im = cv2.imdecode(im_data, cv2.IMREAD_COLOR)
  im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
  if resize and resize != im.shape[:2]:
    ip = cv2.INTER_LINEAR if interpolation == 'linear' else cv2.INTER_NEAREST
    im = cv2.resize(im, resize, interpolation=ip)
  return np.array(im, dtype=np.float32) / 255.0
Example #21
Source File: reader.py From g-tensorflow-models with Apache License 2.0
def compile_file_list(self, data_dir, split, load_pose=False):
  """Creates a list of input files."""
  logging.info('data_dir: %s', data_dir)
  with gfile.Open(os.path.join(data_dir, '%s.txt' % split), 'r') as f:
    frames = f.readlines()
    frames = [k.rstrip() for k in frames]
  subfolders = [x.split(' ')[0] for x in frames]
  frame_ids = [x.split(' ')[1] for x in frames]
  image_file_list = [
      os.path.join(data_dir, subfolders[i],
                   frame_ids[i] + '.' + self.file_extension)
      for i in range(len(frames))
  ]
  segment_file_list = [
      os.path.join(data_dir, subfolders[i],
                   frame_ids[i] + '-fseg.' + self.file_extension)
      for i in range(len(frames))
  ]
  cam_file_list = [
      os.path.join(data_dir, subfolders[i], frame_ids[i] + '_cam.txt')
      for i in range(len(frames))
  ]
  file_lists = {}
  file_lists['image_file_list'] = image_file_list
  file_lists['segment_file_list'] = segment_file_list
  file_lists['cam_file_list'] = cam_file_list
  if load_pose:
    pose_file_list = [
        os.path.join(data_dir, subfolders[i], frame_ids[i] + '_pose.txt')
        for i in range(len(frames))
    ]
    file_lists['pose_file_list'] = pose_file_list
  self.steps_per_epoch = len(image_file_list) // self.batch_size
  return file_lists
Example #22
Source File: util.py From multilabel-image-classification-tensorflow with MIT License
def save_image(img_file, im, file_extension):
  """Saves an image to disk. Expected input value range: [0,1]."""
  im = (im * 255.0).astype(np.uint8)
  # Encoded image bytes must be written in binary mode ('wb', not 'w').
  with gfile.Open(img_file, 'wb') as f:
    im = cv2.cvtColor(im, cv2.COLOR_RGB2BGR)
    _, im_data = cv2.imencode('.%s' % file_extension, im)
    f.write(im_data.tobytes())
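A short usage sketch pairing save_image with the load_image helper from Example #20; the file paths here are hypothetical:

im = load_image('/tmp/example.jpg')            # float32 array in [0, 1]
save_image('/tmp/example_copy.png', im, 'png')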
Example #23
Source File: inference.py From g-tensorflow-models with Apache License 2.0
def create_output_dirs(im_files, basepath_in, output_dir):
  """Creates required directories, and returns output dir for each file."""
  output_dirs = []
  for i in range(len(im_files)):
    relative_folder_in = os.path.relpath(
        os.path.dirname(im_files[i]), basepath_in)
    absolute_folder_out = os.path.join(output_dir, relative_folder_in)
    if not gfile.IsDirectory(absolute_folder_out):
      gfile.MakeDirs(absolute_folder_out)
    output_dirs.append(absolute_folder_out)
  return output_dirs
Example #24
Source File: util.py From multilabel-image-classification-tensorflow with MIT License
def read_text_lines(filepath):
  with tf.gfile.Open(filepath, 'r') as f:
    lines = f.readlines()
  lines = [l.rstrip() for l in lines]
  return lines
Example #25
Source File: util.py From g-tensorflow-models with Apache License 2.0
def read_text_lines(filepath):
  with tf.gfile.Open(filepath, 'r') as f:
    lines = f.readlines()
  lines = [l.rstrip() for l in lines]
  return lines
Example #26
Source File: util.py From g-tensorflow-models with Apache License 2.0
def save_image(img_file, im, file_extension):
  """Saves an image to disk. Expected input value range: [0,1]."""
  im = (im * 255.0).astype(np.uint8)
  # Encoded image bytes must be written in binary mode ('wb', not 'w').
  with gfile.Open(img_file, 'wb') as f:
    im = cv2.cvtColor(im, cv2.COLOR_RGB2BGR)
    _, im_data = cv2.imencode('.%s' % file_extension, im)
    f.write(im_data.tobytes())
Example #27
Source File: inference.py From multilabel-image-classification-tensorflow with MIT License
def create_output_dirs(im_files, basepath_in, output_dir):
  """Creates required directories, and returns output dir for each file."""
  output_dirs = []
  for i in range(len(im_files)):
    relative_folder_in = os.path.relpath(
        os.path.dirname(im_files[i]), basepath_in)
    absolute_folder_out = os.path.join(output_dir, relative_folder_in)
    if not gfile.IsDirectory(absolute_folder_out):
      gfile.MakeDirs(absolute_folder_out)
    output_dirs.append(absolute_folder_out)
  return output_dirs
Example #28
Source File: util.py From g-tensorflow-models with Apache License 2.0
def load_image(img_file, resize=None, interpolation='linear'):
  """Load image from disk. Output value range: [0,1]."""
  # Read the raw bytes in binary mode; np.frombuffer replaces the
  # deprecated np.fromstring.
  im_data = np.frombuffer(gfile.Open(img_file, 'rb').read(), np.uint8)
  im = cv2.imdecode(im_data, cv2.IMREAD_COLOR)
  im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
  if resize and resize != im.shape[:2]:
    ip = cv2.INTER_LINEAR if interpolation == 'linear' else cv2.INTER_NEAREST
    im = cv2.resize(im, resize, interpolation=ip)
  return np.array(im, dtype=np.float32) / 255.0
Example #29
Source File: reader.py From multilabel-image-classification-tensorflow with MIT License
def compile_file_list(self, data_dir, split, load_pose=False):
  """Creates a list of input files."""
  logging.info('data_dir: %s', data_dir)
  with gfile.Open(os.path.join(data_dir, '%s.txt' % split), 'r') as f:
    frames = f.readlines()
    subfolders = [x.split(' ')[0] for x in frames]
    frame_ids = [x.split(' ')[1][:-1] for x in frames]
  image_file_list = [
      os.path.join(data_dir, subfolders[i], frame_ids[i] + '.jpg')
      for i in range(len(frames))
  ]
  cam_file_list = [
      os.path.join(data_dir, subfolders[i], frame_ids[i] + '_cam.txt')
      for i in range(len(frames))
  ]
  file_lists = {}
  file_lists['image_file_list'] = image_file_list
  file_lists['cam_file_list'] = cam_file_list
  if load_pose:
    pose_file_list = [
        os.path.join(data_dir, subfolders[i], frame_ids[i] + '_pose.txt')
        for i in range(len(frames))
    ]
    file_lists['pose_file_list'] = pose_file_list
  self.steps_per_epoch = len(image_file_list) // self.batch_size
  return file_lists
Example #30
Source File: reader.py From multilabel-image-classification-tensorflow with MIT License
def compile_file_list(self, data_dir, split, load_pose=False):
  """Creates a list of input files."""
  logging.info('data_dir: %s', data_dir)
  with gfile.Open(os.path.join(data_dir, '%s.txt' % split), 'r') as f:
    frames = f.readlines()
    frames = [k.rstrip() for k in frames]
  subfolders = [x.split(' ')[0] for x in frames]
  frame_ids = [x.split(' ')[1] for x in frames]
  image_file_list = [
      os.path.join(data_dir, subfolders[i],
                   frame_ids[i] + '.' + self.file_extension)
      for i in range(len(frames))
  ]
  segment_file_list = [
      os.path.join(data_dir, subfolders[i],
                   frame_ids[i] + '-fseg.' + self.file_extension)
      for i in range(len(frames))
  ]
  cam_file_list = [
      os.path.join(data_dir, subfolders[i], frame_ids[i] + '_cam.txt')
      for i in range(len(frames))
  ]
  file_lists = {}
  file_lists['image_file_list'] = image_file_list
  file_lists['segment_file_list'] = segment_file_list
  file_lists['cam_file_list'] = cam_file_list
  if load_pose:
    pose_file_list = [
        os.path.join(data_dir, subfolders[i], frame_ids[i] + '_pose.txt')
        for i in range(len(frames))
    ]
    file_lists['pose_file_list'] = pose_file_list
  self.steps_per_epoch = len(image_file_list) // self.batch_size
  return file_lists