Python tensorflow.Records() Examples
The following are 4 code examples of tensorflow.Records().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module tensorflow, or try the search function.
Example #1
Source File: preprocessing.py From training_results_v0.5 with Apache License 2.0 | 6 votes |
def get_input_tensors(batch_size, tf_records, num_repeats=None,
                      shuffle_records=True, shuffle_examples=True,
                      shuffle_buffer_size=None, filter_amount=0.05):
    """Read tf.Records and prepare them for ingestion by dual_net.

    See `read_tf_records` for parameter documentation.

    Returns a dict of tensors (see return value of batch_parse_tf_example)
    """
    # Fall back to the module-wide default when no buffer size was supplied.
    if shuffle_buffer_size is None:
        shuffle_buffer_size = SHUFFLE_BUFFER_SIZE
    ds = read_tf_records(
        batch_size,
        tf_records,
        num_repeats=num_repeats,
        shuffle_records=shuffle_records,
        shuffle_examples=shuffle_examples,
        shuffle_buffer_size=shuffle_buffer_size,
        filter_amount=filter_amount)
    # Drop any trailing partial batch so every batch holds exactly
    # batch_size examples before parsing.
    ds = ds.filter(lambda batch: tf.equal(tf.shape(batch)[0], batch_size))
    ds = ds.map(functools.partial(batch_parse_tf_example, batch_size))
    return ds.make_one_shot_iterator().get_next()


# End-to-end utility functions
Example #2
Source File: preprocessing.py From training_results_v0.5 with Apache License 2.0 | 6 votes |
def get_input_tensors(batch_size, tf_records, num_repeats=None,
                      shuffle_records=True, shuffle_examples=True,
                      shuffle_buffer_size=None, filter_amount=0.05):
    """Read tf.Records and prepare them for ingestion by dual_net.

    See `read_tf_records` for parameter documentation.

    Returns a dict of tensors (see return value of batch_parse_tf_example)
    """
    # Use the module default unless the caller overrode the buffer size.
    buf_size = (SHUFFLE_BUFFER_SIZE if shuffle_buffer_size is None
                else shuffle_buffer_size)
    ds = read_tf_records(batch_size,
                         tf_records,
                         num_repeats=num_repeats,
                         shuffle_records=shuffle_records,
                         shuffle_examples=shuffle_examples,
                         shuffle_buffer_size=buf_size,
                         filter_amount=filter_amount)
    # Only keep full batches; a short final batch would break downstream
    # shape assumptions.
    ds = ds.filter(lambda t: tf.equal(tf.shape(t)[0], batch_size))
    parse = functools.partial(batch_parse_tf_example, batch_size)
    return ds.map(parse).make_one_shot_iterator().get_next()


# End-to-end utility functions
Example #3
Source File: DeeProtein.py From AiGEM_TeamHeidelberg2017 with MIT License | 5 votes |
def check_data(self, tfrecords_filename):
    """Checks a specified tf.Records file for correct data format.

    Check if the data format in the example files is correct. Prints the
    shape of the data stored in a tf.Records file.

    Args:
        tfrecords_filename: `str`, the path to the `tf.records` file to check.
    """
    record_iterator = tf.python_io.tf_record_iterator(path=tfrecords_filename)
    for string_record in record_iterator:
        # Parse the next serialized example proto.
        example = tf.train.Example()
        example.ParseFromString(string_record)
        # Get the features you stored (change to match your tfrecord
        # writing code).
        seq = (example.features.feature['seq_raw']
               .bytes_list
               .value[0])
        label = (example.features.feature['label_raw']
                 .bytes_list
                 .value[0])
        # Convert to a numpy array (change dtype to the datatype you stored).
        # np.frombuffer replaces the deprecated np.fromstring for decoding
        # raw binary bytes; the resulting values are identical.
        seq_array = np.frombuffer(seq, dtype=np.float64)
        label_array = np.frombuffer(label, dtype=np.float64)
        # Print the array shapes; do they match your expectations?
        print(seq_array.shape)
        print(label_array.shape)
Example #4
Source File: preprocessing.py From training with Apache License 2.0 | 5 votes |
def get_input_tensors(batch_size, feature_layout, tf_records, num_repeats=1,
                      shuffle_records=True, shuffle_examples=True,
                      shuffle_buffer_size=None, filter_amount=0.05,
                      random_rotation=True):
    """Read tf.Records and prepare them for ingestion by dual_net.

    See `read_tf_records` for parameter documentation.

    Returns a dict of tensors (see return value of batch_parse_tf_example)
    """
    print("Reading tf_records from {} inputs".format(len(tf_records)))
    ds = read_tf_records(
        batch_size,
        tf_records,
        num_repeats=num_repeats,
        shuffle_records=shuffle_records,
        shuffle_examples=shuffle_examples,
        shuffle_buffer_size=shuffle_buffer_size,
        filter_amount=filter_amount,
        interleave=False)
    # Discard any short final batch so downstream shapes are uniform.
    ds = ds.filter(lambda batch: tf.equal(tf.shape(batch)[0], batch_size))
    ds = ds.map(functools.partial(
        batch_parse_tf_example, batch_size, feature_layout))
    if random_rotation:
        # Unbatch the dataset so each example can be rotated independently,
        # then rotate and re-batch in one fused step.
        ds = ds.apply(tf.data.experimental.unbatch())
        ds = ds.apply(tf.data.experimental.map_and_batch(
            functools.partial(_random_rotation, feature_layout), batch_size))
    return ds.make_one_shot_iterator().get_next()