Python tensorflow.python.ops.lookup_ops.index_table_from_tensor() Examples
The following are 30 code examples of tensorflow.python.ops.lookup_ops.index_table_from_tensor(), collected from open-source projects. The source file, project, and license are noted above each example.
You may also want to check out all available functions and classes of the module tensorflow.python.ops.lookup_ops.
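Before the examples, here is a minimal, self-contained sketch of the call itself, assuming the TF 1.x graph-mode runtime that most of the examples below target; the vocabulary and query tokens are illustrative only:

import tensorflow as tf
from tensorflow.python.ops import lookup_ops

# Ids follow position in the vocabulary tensor: "emerson" -> 0, "lake" -> 1, ...
vocab = tf.constant(["emerson", "lake", "palmer"])
table = lookup_ops.index_table_from_tensor(
    vocabulary_list=vocab, num_oov_buckets=1)
ids = table.lookup(tf.constant(["emerson", "lake", "palmer", "king"]))

with tf.Session() as sess:
    # Lookup tables must be initialized before first use in graph mode.
    sess.run(tf.tables_initializer())
    print(sess.run(ids))  # [0 1 2 3] -- "king" hashes into the single OOV bucket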
Example #1
Source File: feature_column.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _transform_feature(self, inputs):
  input_tensor = _to_sparse_input(inputs.get(self.key))

  if self.dtype.is_integer != input_tensor.dtype.is_integer:
    raise ValueError(
        'Column dtype and SparseTensors dtype must be compatible. '
        'key: {}, column dtype: {}, tensor dtype: {}'.format(
            self.key, self.dtype, input_tensor.dtype))

  _assert_string_or_int(
      input_tensor.dtype,
      prefix='column_name: {} input_tensor'.format(self.key))

  key_dtype = self.dtype
  if input_tensor.dtype.is_integer:
    # `index_table_from_tensor` requires 64-bit integer keys.
    key_dtype = dtypes.int64
    input_tensor = math_ops.to_int64(input_tensor)

  return lookup_ops.index_table_from_tensor(
      vocabulary_list=tuple(self.vocabulary_list),
      default_value=self.default_value,
      num_oov_buckets=self.num_oov_buckets,
      dtype=key_dtype,
      name='{}_lookup'.format(self.key)).lookup(input_tensor)
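As the comment in Example #1 notes, integer vocabularies must use 64-bit keys. A hedged sketch of that path, with a hypothetical integer vocabulary and TF 1.x assumed:

import tensorflow as tf
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import lookup_ops, math_ops

int_vocab = (10, 20, 30)  # hypothetical integer category values
# int32 inputs are widened first, since the table requires int64 keys.
inputs = math_ops.to_int64(tf.constant([20, 10, 99], dtype=tf.int32))

table = lookup_ops.index_table_from_tensor(
    vocabulary_list=int_vocab, default_value=-1, dtype=dtypes.int64)
ids = table.lookup(inputs)

with tf.Session() as sess:
    sess.run(tf.tables_initializer())
    print(sess.run(ids))  # [ 1  0 -1] -- 99 is out of vocabulary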
Example #2
Source File: feature_column.py From lambda-packs with MIT License
def _transform_feature(self, inputs):
  input_tensor = _to_sparse_input(inputs.get(self.key))

  if self.dtype.is_integer != input_tensor.dtype.is_integer:
    raise ValueError(
        'Column dtype and SparseTensors dtype must be compatible. '
        'key: {}, column dtype: {}, tensor dtype: {}'.format(
            self.key, self.dtype, input_tensor.dtype))

  _assert_string_or_int(
      input_tensor.dtype,
      prefix='column_name: {} input_tensor'.format(self.key))

  key_dtype = self.dtype
  if input_tensor.dtype.is_integer:
    # `index_table_from_tensor` requires 64-bit integer keys.
    key_dtype = dtypes.int64
    input_tensor = math_ops.to_int64(input_tensor)

  return lookup_ops.index_table_from_tensor(
      vocabulary_list=tuple(self.vocabulary_list),
      default_value=self.default_value,
      dtype=key_dtype,
      name='{}_lookup'.format(self.key)).lookup(input_tensor)
Example #3
Source File: head.py From estimator with Apache License 2.0
def _label_ids(self, labels):
  """Converts labels to integer id space."""
  if self._label_vocabulary is None:
    if not labels.dtype.is_integer:
      raise ValueError(
          'Labels dtype should be integer. Instead got {}.'.format(
              labels.dtype))
    label_ids = labels
  else:
    if labels.dtype != tf.dtypes.string:
      raise ValueError('Labels dtype should be string if there is a '
                       'vocabulary. Instead got {}'.format(labels.dtype))
    label_ids = lookup_ops.index_table_from_tensor(
        vocabulary_list=tuple(self._label_vocabulary),
        name='class_id_lookup').lookup(labels)
  return _assert_range(label_ids, self._n_classes)
Example #4
Source File: head.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _label_ids(self, labels):
  """Converts labels to integer id space."""
  if self._label_vocabulary is None:
    if not labels.dtype.is_integer:
      raise ValueError('Labels dtype should be integer '
                       'Instead got %s.' % labels.dtype)
    label_ids = labels
  else:
    if labels.dtype != dtypes.string:
      raise ValueError('Labels dtype should be string if there is a '
                       'vocabulary. Instead got {}'.format(labels.dtype))
    label_ids = lookup_ops.index_table_from_tensor(
        vocabulary_list=tuple(self._label_vocabulary),
        name='class_id_lookup').lookup(labels)
  return _assert_range(label_ids, self._n_classes)
Example #5
Source File: multi_class_head.py From estimator with Apache License 2.0
def _class_id_table(self):
  """Creates a lookup table for class_id.

  In eager execution, this lookup table will be lazily created on the first
  call of `self._class_id_table`, and cached for later use; In graph
  execution, it will be created on demand.

  Returns:
    A hash table for lookup.
  """
  if self._cached_class_id_table is None or not tf.executing_eagerly():
    self._cached_class_id_table = lookup_ops.index_table_from_tensor(
        vocabulary_list=tuple(self._label_vocabulary), name='class_id_lookup')
  return self._cached_class_id_table
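This property (repeated verbatim in Examples #6 and #7) caches the table because it owns a resource: under eager execution it is built once and reused, while in graph mode it is rebuilt so each graph gets its own table op. A standalone sketch of the same pattern, with an illustrative class that is not part of the estimator code and an eager TF 2.x runtime assumed:

import tensorflow as tf
from tensorflow.python.ops import lookup_ops

class LabelLookup(object):
    """Illustrative holder that caches its lookup table only when eager."""

    def __init__(self, vocabulary):
        self._vocabulary = tuple(vocabulary)
        self._cached_table = None

    @property
    def table(self):
        # Eager: build once and reuse the resource. Graph: rebuild per call.
        if self._cached_table is None or not tf.executing_eagerly():
            self._cached_table = lookup_ops.index_table_from_tensor(
                vocabulary_list=self._vocabulary, name='class_id_lookup')
        return self._cached_table

lookup = LabelLookup(['cat', 'dog'])
print(lookup.table.lookup(tf.constant(['dog', 'cat'])))  # [1 0]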
Example #6
Source File: multi_label_head.py From estimator with Apache License 2.0
def _class_id_table(self):
  """Creates a lookup table for class_id.

  In eager execution, this lookup table will be lazily created on the first
  call of `self._class_id_table`, and cached for later use; In graph
  execution, it will be created on demand.

  Returns:
    A hash table for lookup.
  """
  if self._cached_class_id_table is None or not tf.executing_eagerly():
    self._cached_class_id_table = lookup_ops.index_table_from_tensor(
        vocabulary_list=tuple(self._label_vocabulary), name='class_id_lookup')
  return self._cached_class_id_table
Example #7
Source File: binary_class_head.py From estimator with Apache License 2.0
def _class_id_table(self):
  """Creates a lookup table for class_id.

  In eager execution, this lookup table will be lazily created on the first
  call of `self._class_id_table`, and cached for later use; In graph
  execution, it will be created on demand.

  Returns:
    A hash table for lookup.
  """
  if self._cached_class_id_table is None or not tf.executing_eagerly():
    self._cached_class_id_table = lookup_ops.index_table_from_tensor(
        vocabulary_list=tuple(self._label_vocabulary), name='class_id_lookup')
  return self._cached_class_id_table
Example #8
Source File: head.py From estimator with Apache License 2.0
def create_loss(self, features, mode, logits, labels):
  """See `Head`."""
  del mode  # Unused for this head.
  logits = ops.convert_to_tensor(logits)
  labels = _check_dense_labels_match_logits_and_reshape(
      labels=labels, logits=logits, expected_labels_dimension=1)
  if self._label_vocabulary is not None:
    labels = lookup_ops.index_table_from_tensor(
        vocabulary_list=tuple(self._label_vocabulary),
        name='class_id_lookup').lookup(labels)
  labels = tf.cast(labels, dtype=tf.dtypes.float32)
  labels = _assert_range(labels, n_classes=2)
  if self._loss_fn:
    unweighted_loss = _call_loss_fn(
        loss_fn=self._loss_fn,
        labels=labels,
        logits=logits,
        features=features,
        expected_loss_dim=1)
  else:
    unweighted_loss = tf.compat.v1.nn.sigmoid_cross_entropy_with_logits(
        labels=labels, logits=logits)
  weights = _get_weights_and_check_match_logits(
      features=features, weight_column=self._weight_column, logits=logits)
  training_loss = tf.compat.v1.losses.compute_weighted_loss(
      unweighted_loss, weights=weights, reduction=self._loss_reduction)
  return LossSpec(
      training_loss=training_loss,
      unreduced_loss=unweighted_loss,
      weights=weights,
      processed_labels=labels)
Example #9
Source File: head.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def create_loss(self, features, mode, logits, labels):
  """See `Head`."""
  del mode, features  # Unused for this head.
  labels = _check_labels(_maybe_expand_dim(labels), self.logits_dimension)
  if self._label_vocabulary is not None:
    labels = lookup_ops.index_table_from_tensor(
        vocabulary_list=tuple(self._label_vocabulary),
        name='class_id_lookup').lookup(labels)
  labels = math_ops.to_float(labels)
  labels = _assert_range(labels, 2)
  return LossAndLabels(
      unweighted_loss=nn.sigmoid_cross_entropy_with_logits(
          labels=labels, logits=logits),
      processed_labels=labels)
Example #10
Source File: common_test_utils.py From nmt with Apache License 2.0
def create_test_iterator(hparams, mode):
  """Create test iterator."""
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant([hparams.eos, "a", "b", "c", "d"]))
  tgt_vocab_mapping = tf.constant([hparams.sos, hparams.eos, "a", "b", "c"])
  tgt_vocab_table = lookup_ops.index_table_from_tensor(tgt_vocab_mapping)
  if mode == tf.contrib.learn.ModeKeys.INFER:
    reverse_tgt_vocab_table = lookup_ops.index_to_string_table_from_tensor(
        tgt_vocab_mapping)

  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["a a b b c", "a b b"]))

  if mode != tf.contrib.learn.ModeKeys.INFER:
    tgt_dataset = tf.data.Dataset.from_tensor_slices(
        tf.constant(["a b c b c", "a b c b"]))
    return (
        iterator_utils.get_iterator(
            src_dataset=src_dataset,
            tgt_dataset=tgt_dataset,
            src_vocab_table=src_vocab_table,
            tgt_vocab_table=tgt_vocab_table,
            batch_size=hparams.batch_size,
            sos=hparams.sos,
            eos=hparams.eos,
            random_seed=hparams.random_seed,
            num_buckets=hparams.num_buckets),
        src_vocab_table, tgt_vocab_table)
  else:
    return (
        iterator_utils.get_infer_iterator(
            src_dataset=src_dataset,
            src_vocab_table=src_vocab_table,
            eos=hparams.eos,
            batch_size=hparams.batch_size),
        src_vocab_table, tgt_vocab_table, reverse_tgt_vocab_table)
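The NMT-style examples pair each index table with index_to_string_table_from_tensor over the same vocabulary tensor, so that predicted ids can be decoded back into tokens at inference time. A minimal round-trip sketch, TF 1.x assumed and tokens illustrative:

import tensorflow as tf
from tensorflow.python.ops import lookup_ops

vocab = tf.constant(["<sos>", "<eos>", "a", "b", "c"])
vocab_table = lookup_ops.index_table_from_tensor(vocab)        # token -> id
reverse_table = lookup_ops.index_to_string_table_from_tensor(  # id -> token
    vocab, default_value="<unk>")

ids = vocab_table.lookup(tf.constant(["a", "c", "b"]))
tokens = reverse_table.lookup(ids)

with tf.Session() as sess:
    sess.run(tf.tables_initializer())
    print(sess.run(ids))     # [2 4 3]
    print(sess.run(tokens))  # [b'a' b'c' b'b']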
Example #11
Source File: common_test_utils.py From inference with Apache License 2.0
def create_test_iterator(hparams, mode):
  """Create test iterator."""
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant([hparams.eos, "a", "b", "c", "d"]))
  tgt_vocab_mapping = tf.constant([hparams.sos, hparams.eos, "a", "b", "c"])
  tgt_vocab_table = lookup_ops.index_table_from_tensor(tgt_vocab_mapping)
  if mode == tf.contrib.learn.ModeKeys.INFER:
    reverse_tgt_vocab_table = lookup_ops.index_to_string_table_from_tensor(
        tgt_vocab_mapping)

  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["a a b b c", "a b b"]))

  if mode != tf.contrib.learn.ModeKeys.INFER:
    tgt_dataset = tf.data.Dataset.from_tensor_slices(
        tf.constant(["a b c b c", "a b c b"]))
    return (
        iterator_utils.get_iterator(
            src_dataset=src_dataset,
            tgt_dataset=tgt_dataset,
            src_vocab_table=src_vocab_table,
            tgt_vocab_table=tgt_vocab_table,
            batch_size=hparams.batch_size,
            sos=hparams.sos,
            eos=hparams.eos,
            random_seed=hparams.random_seed,
            num_buckets=hparams.num_buckets),
        src_vocab_table, tgt_vocab_table)
  else:
    return (
        iterator_utils.get_infer_iterator(
            src_dataset=src_dataset,
            src_vocab_table=src_vocab_table,
            eos=hparams.eos,
            batch_size=hparams.batch_size),
        src_vocab_table, tgt_vocab_table, reverse_tgt_vocab_table)
Example #12
Source File: tf_example_decoder_test.py From BMW-TensorFlow-Training-GUI with Apache License 2.0
def testDecodeExampleWithBranchedLookup(self):
  example = example_pb2.Example(features=feature_pb2.Features(feature={
      'image/object/class/text': self._BytesFeatureFromList(
          np.array(['cat', 'dog', 'guinea pig'])),
  }))
  serialized_example = example.SerializeToString()
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))

  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())

    serialized_example = array_ops.reshape(serialized_example, shape=[])

    keys_to_features = {
        'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
    }

    items_to_handlers = {
        'labels':
            tf_example_decoder.LookupTensor('image/object/class/text', table),
    }

    decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                    items_to_handlers)
    obtained_class_ids = decoder.decode(serialized_example)[0].eval()

  self.assertAllClose([2, 0, 1], obtained_class_ids)
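In the decoder tests, the VarLenFeature is parsed into a SparseTensor of strings, and LookupTensor routes it through the table. The lookup itself accepts a SparseTensor directly, returning a SparseTensor of ids with the same indices; a hedged sketch of just that step, TF 1.x assumed:

import tensorflow as tf
from tensorflow.python.ops import lookup_ops

table = lookup_ops.index_table_from_tensor(
    tf.constant(['dog', 'guinea pig', 'cat']))

# A 2x2 sparse batch of class-text strings, like a decoded VarLenFeature.
text = tf.SparseTensor(
    indices=[[0, 0], [0, 1], [1, 0]],
    values=tf.constant(['cat', 'dog', 'guinea pig']),
    dense_shape=[2, 2])
ids = table.lookup(text)  # SparseTensor; only the values are remapped

with tf.Session() as sess:
    sess.run(tf.tables_initializer())
    print(sess.run(ids.values))  # [2 0 1]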
Example #13
Source File: tf_example_decoder_test.py From ros_tensorflow with Apache License 2.0
def testDecodeExampleWithBranchedLookup(self):
  example = example_pb2.Example(features=feature_pb2.Features(feature={
      'image/object/class/text': self._BytesFeatureFromList(
          np.array(['cat', 'dog', 'guinea pig'])),
  }))
  serialized_example = example.SerializeToString()
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))

  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())

    serialized_example = array_ops.reshape(serialized_example, shape=[])

    keys_to_features = {
        'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
    }

    items_to_handlers = {
        'labels':
            tf_example_decoder.LookupTensor('image/object/class/text', table),
    }

    decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                    items_to_handlers)
    obtained_class_ids = decoder.decode(serialized_example)[0].eval()

  self.assertAllClose([2, 0, 1], obtained_class_ids)
Example #14
Source File: tf_example_decoder_test.py From Gun-Detector with Apache License 2.0
def testDecodeExampleWithBranchedLookup(self):
  example = example_pb2.Example(features=feature_pb2.Features(feature={
      'image/object/class/text': self._BytesFeatureFromList(
          np.array(['cat', 'dog', 'guinea pig'])),
  }))
  serialized_example = example.SerializeToString()
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))

  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())

    serialized_example = array_ops.reshape(serialized_example, shape=[])

    keys_to_features = {
        'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
    }

    items_to_handlers = {
        'labels':
            tf_example_decoder.LookupTensor('image/object/class/text', table),
    }

    decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                    items_to_handlers)
    obtained_class_ids = decoder.decode(serialized_example)[0].eval()

  self.assertAllClose([2, 0, 1], obtained_class_ids)
Example #15
Source File: tfexample_decoder_test.py From tf-slim with Apache License 2.0
def testDecodeExampleWithLookup(self):
  example = tf.train.Example(
      features=tf.train.Features(
          feature={
              'image/object/class/text':
                  self._BytesFeature(np.array(['cat', 'dog', 'guinea pig'])),
          }))
  serialized_example = example.SerializeToString()
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      tf.constant(['dog', 'guinea pig', 'cat']))

  with self.cached_session() as sess:
    sess.run(lookup_ops.tables_initializer())

    serialized_example = array_ops.reshape(serialized_example, shape=[])

    keys_to_features = {
        'image/object/class/text': parsing_ops.VarLenFeature(tf.string),
    }

    items_to_handlers = {
        'labels':
            tfexample_decoder.LookupTensor('image/object/class/text', table),
    }

    decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
                                                 items_to_handlers)
    obtained_class_ids = decoder.decode(serialized_example)[0].eval()

  self.assertAllClose([2, 0, 1], obtained_class_ids)
Example #16
Source File: common_test_utils.py From parallax with Apache License 2.0
def create_test_iterator(hparams, mode):
  """Create test iterator."""
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant([hparams.eos, "a", "b", "c", "d"]))
  tgt_vocab_mapping = tf.constant([hparams.sos, hparams.eos, "a", "b", "c"])
  tgt_vocab_table = lookup_ops.index_table_from_tensor(tgt_vocab_mapping)
  if mode == tf.contrib.learn.ModeKeys.INFER:
    reverse_tgt_vocab_table = lookup_ops.index_to_string_table_from_tensor(
        tgt_vocab_mapping)

  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["a a b b c", "a b b"]))

  if mode != tf.contrib.learn.ModeKeys.INFER:
    tgt_dataset = tf.data.Dataset.from_tensor_slices(
        tf.constant(["a b c b c", "a b c b"]))
    return (
        iterator_utils.get_iterator(
            src_dataset=src_dataset,
            tgt_dataset=tgt_dataset,
            src_vocab_table=src_vocab_table,
            tgt_vocab_table=tgt_vocab_table,
            batch_size=hparams.batch_size,
            sos=hparams.sos,
            eos=hparams.eos,
            random_seed=hparams.random_seed,
            num_buckets=hparams.num_buckets),
        src_vocab_table, tgt_vocab_table)
  else:
    return (
        iterator_utils.get_infer_iterator(
            src_dataset=src_dataset,
            src_vocab_table=src_vocab_table,
            eos=hparams.eos,
            batch_size=hparams.batch_size),
        src_vocab_table, tgt_vocab_table, reverse_tgt_vocab_table)
Example #17
Source File: common_test_utils.py From NETransliteration-COLING2018 with MIT License
def create_test_iterator(hparams, mode):
  """Create test iterator."""
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant([hparams.eos, "a", "b", "c", "d"]))
  tgt_vocab_mapping = tf.constant([hparams.sos, hparams.eos, "a", "b", "c"])
  tgt_vocab_table = lookup_ops.index_table_from_tensor(tgt_vocab_mapping)
  if mode == tf.contrib.learn.ModeKeys.INFER:
    reverse_tgt_vocab_table = lookup_ops.index_to_string_table_from_tensor(
        tgt_vocab_mapping)

  src_dataset = tf.contrib.data.Dataset.from_tensor_slices(
      tf.constant(["a a b b c", "a b b"]))

  if mode != tf.contrib.learn.ModeKeys.INFER:
    tgt_dataset = tf.contrib.data.Dataset.from_tensor_slices(
        tf.constant(["a b c b c", "a b c b"]))
    return (
        iterator_utils.get_iterator(
            src_dataset=src_dataset,
            tgt_dataset=tgt_dataset,
            src_vocab_table=src_vocab_table,
            tgt_vocab_table=tgt_vocab_table,
            batch_size=hparams.batch_size,
            sos=hparams.sos,
            eos=hparams.eos,
            source_reverse=hparams.source_reverse,
            random_seed=hparams.random_seed,
            num_buckets=hparams.num_buckets),
        src_vocab_table, tgt_vocab_table)
  else:
    return (
        iterator_utils.get_infer_iterator(
            src_dataset=src_dataset,
            src_vocab_table=src_vocab_table,
            eos=hparams.eos,
            source_reverse=hparams.source_reverse,
            batch_size=hparams.batch_size),
        src_vocab_table, tgt_vocab_table, reverse_tgt_vocab_table)
Example #18
Source File: tf_example_decoder_test.py From ros_people_object_detection_tensorflow with Apache License 2.0
def testDecodeExampleWithBranchedLookup(self):
  example = example_pb2.Example(features=feature_pb2.Features(feature={
      'image/object/class/text': self._BytesFeatureFromList(
          np.array(['cat', 'dog', 'guinea pig'])),
  }))
  serialized_example = example.SerializeToString()
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))

  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())

    serialized_example = array_ops.reshape(serialized_example, shape=[])

    keys_to_features = {
        'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
    }

    items_to_handlers = {
        'labels':
            tf_example_decoder.LookupTensor('image/object/class/text', table),
    }

    decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                    items_to_handlers)
    obtained_class_ids = decoder.decode(serialized_example)[0].eval()

  self.assertAllClose([2, 0, 1], obtained_class_ids)
Example #19
Source File: tf_example_decoder_test.py From Person-Detection-and-Tracking with MIT License
def testDecodeExampleWithBranchedLookup(self):
  example = example_pb2.Example(features=feature_pb2.Features(feature={
      'image/object/class/text': self._BytesFeatureFromList(
          np.array(['cat', 'dog', 'guinea pig'])),
  }))
  serialized_example = example.SerializeToString()
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))

  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())

    serialized_example = array_ops.reshape(serialized_example, shape=[])

    keys_to_features = {
        'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
    }

    items_to_handlers = {
        'labels':
            tf_example_decoder.LookupTensor('image/object/class/text', table),
    }

    decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                    items_to_handlers)
    obtained_class_ids = decoder.decode(serialized_example)[0].eval()

  self.assertAllClose([2, 0, 1], obtained_class_ids)
Example #20
Source File: iterator_utils_test.py From training_results_v0.5 with Apache License 2.0
def testGetInferIterator(self):
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c a", "c a", "d", "f e a g"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      eos="eos",
      sos="sos")
  batch_size = 2
  dataset = iterator_utils.get_infer_iterator(
      src_dataset=src_dataset,
      src_vocab_table=src_vocab_table,
      batch_size=batch_size,
      eos=hparams.eos)
  table_initializer = tf.tables_initializer()
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)
    features = sess.run(get_next)

    self.assertAllEqual(
        [[2, 2, 0],   # c c a
         [2, 0, 3]],  # c a eos
        features["source"])
    self.assertAllEqual([3, 2], features["source_sequence_length"])
Example #21
Source File: iterator_utils_test.py From training_results_v0.5 with Apache License 2.0
def testGetInferIterator(self):
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c a", "c a", "d", "f e a g"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      eos="eos",
      sos="sos")
  batch_size = 2
  dataset = iterator_utils.get_infer_iterator(
      src_dataset=src_dataset,
      src_vocab_table=src_vocab_table,
      batch_size=batch_size,
      eos=hparams.eos)
  table_initializer = tf.tables_initializer()
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)
    features = sess.run(get_next)

    self.assertAllEqual(
        [[2, 2, 0],   # c c a
         [2, 0, 3]],  # c a eos
        features["source"])
    self.assertAllEqual([3, 2], features["source_sequence_length"])
Example #22
Source File: iterator_utils_test.py From training_results_v0.5 with Apache License 2.0
def testGetInferIterator(self):
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c a", "c a", "d", "f e a g"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      eos="eos",
      sos="sos")
  batch_size = 2
  dataset = iterator_utils.get_infer_iterator(
      src_dataset=src_dataset,
      src_vocab_table=src_vocab_table,
      batch_size=batch_size,
      eos=hparams.eos)
  table_initializer = tf.tables_initializer()
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)
    features = sess.run(get_next)

    self.assertAllEqual(
        [[2, 2, 0],   # c c a
         [2, 0, 3]],  # c a eos
        features["source"])
    self.assertAllEqual([3, 2], features["source_sequence_length"])
Example #23
Source File: iterator_utils_test.py From training_results_v0.5 with Apache License 2.0
def testGetIteratorWithShard(self):
  tf.set_random_seed(1)
  tgt_vocab_table = src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c a", "f e a g", "d", "c a"]))
  tgt_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["a b", "c c", "", "b c"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      num_buckets=5,
      eos="eos",
      sos="sos")
  batch_size = 2
  src_max_len = 3
  dataset = iterator_utils.get_iterator(
      src_dataset=src_dataset,
      tgt_dataset=tgt_dataset,
      src_vocab_table=src_vocab_table,
      tgt_vocab_table=tgt_vocab_table,
      batch_size=batch_size,
      sos=hparams.sos,
      eos=hparams.eos,
      random_seed=hparams.random_seed,
      num_buckets=hparams.num_buckets,
      src_max_len=src_max_len,
      num_shards=2,
      shard_index=1,
      reshuffle_each_iteration=False)
  table_initializer = tf.tables_initializer()
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)
    features = sess.run(get_next)
    self.assertAllEqual(
        [[2, 0, 3],    # c a eos -- eos is padding
         [-1, -1, 0]], # "f" == unknown, "e" == unknown, a
        features["source"])
    self.assertAllEqual([2, 3], features["source_sequence_length"])
    self.assertAllEqual(
        [[4, 1, 2],   # sos b c
         [4, 2, 2]],  # sos c c
        features["target_input"])
    self.assertAllEqual(
        [[1, 2, 3],   # b c eos
         [2, 2, 3]],  # c c eos
        features["target_output"])
    self.assertAllEqual([3, 3], features["target_sequence_length"])
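The -1 entries asserted above are the table's default_value: when no OOV buckets are configured, any token missing from the vocabulary maps to default_value, which defaults to -1. A minimal sketch of that behavior, TF 1.x assumed:

import tensorflow as tf
from tensorflow.python.ops import lookup_ops

table = lookup_ops.index_table_from_tensor(
    tf.constant(["a", "b", "c", "eos", "sos"]))
ids = table.lookup(tf.constant(["c", "a", "f", "e"]))

with tf.Session() as sess:
    sess.run(tf.tables_initializer())
    print(sess.run(ids))  # [ 2  0 -1 -1] -- "f" and "e" are unknown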
Example #24
Source File: tf_example_decoder_test.py From ros_people_object_detection_tensorflow with Apache License 2.0
def testDecodeExampleWithBranchedBackupHandler(self):
  example1 = example_pb2.Example(
      features=feature_pb2.Features(
          feature={
              'image/object/class/text':
                  self._BytesFeatureFromList(
                      np.array(['cat', 'dog', 'guinea pig'])),
              'image/object/class/label':
                  self._Int64FeatureFromList(np.array([42, 10, 900]))
          }))
  example2 = example_pb2.Example(
      features=feature_pb2.Features(
          feature={
              'image/object/class/text':
                  self._BytesFeatureFromList(
                      np.array(['cat', 'dog', 'guinea pig'])),
          }))
  example3 = example_pb2.Example(
      features=feature_pb2.Features(
          feature={
              'image/object/class/label':
                  self._Int64FeatureFromList(np.array([42, 10, 901]))
          }))
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))
  keys_to_features = {
      'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
      'image/object/class/label': parsing_ops.VarLenFeature(dtypes.int64),
  }
  backup_handler = tf_example_decoder.BackupHandler(
      handler=slim_example_decoder.Tensor('image/object/class/label'),
      backup=tf_example_decoder.LookupTensor('image/object/class/text', table))
  items_to_handlers = {
      'labels': backup_handler,
  }
  decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                  items_to_handlers)
  obtained_class_ids_each_example = []
  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())
    for example in [example1, example2, example3]:
      serialized_example = array_ops.reshape(
          example.SerializeToString(), shape=[])
      obtained_class_ids_each_example.append(
          decoder.decode(serialized_example)[0].eval())

  self.assertAllClose([42, 10, 900], obtained_class_ids_each_example[0])
  self.assertAllClose([2, 0, 1], obtained_class_ids_each_example[1])
  self.assertAllClose([42, 10, 901], obtained_class_ids_each_example[2])
Example #25
Source File: tf_example_decoder_test.py From Person-Detection-and-Tracking with MIT License
def testDecodeExampleWithBranchedBackupHandler(self):
  example1 = example_pb2.Example(
      features=feature_pb2.Features(
          feature={
              'image/object/class/text':
                  self._BytesFeatureFromList(
                      np.array(['cat', 'dog', 'guinea pig'])),
              'image/object/class/label':
                  self._Int64FeatureFromList(np.array([42, 10, 900]))
          }))
  example2 = example_pb2.Example(
      features=feature_pb2.Features(
          feature={
              'image/object/class/text':
                  self._BytesFeatureFromList(
                      np.array(['cat', 'dog', 'guinea pig'])),
          }))
  example3 = example_pb2.Example(
      features=feature_pb2.Features(
          feature={
              'image/object/class/label':
                  self._Int64FeatureFromList(np.array([42, 10, 901]))
          }))
  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))
  keys_to_features = {
      'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
      'image/object/class/label': parsing_ops.VarLenFeature(dtypes.int64),
  }
  backup_handler = tf_example_decoder.BackupHandler(
      handler=slim_example_decoder.Tensor('image/object/class/label'),
      backup=tf_example_decoder.LookupTensor('image/object/class/text', table))
  items_to_handlers = {
      'labels': backup_handler,
  }
  decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                  items_to_handlers)
  obtained_class_ids_each_example = []
  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())
    for example in [example1, example2, example3]:
      serialized_example = array_ops.reshape(
          example.SerializeToString(), shape=[])
      obtained_class_ids_each_example.append(
          decoder.decode(serialized_example)[0].eval())

  self.assertAllClose([42, 10, 900], obtained_class_ids_each_example[0])
  self.assertAllClose([2, 0, 1], obtained_class_ids_each_example[1])
  self.assertAllClose([42, 10, 901], obtained_class_ids_each_example[2])
Example #26
Source File: common_test_utils.py From active-qa with Apache License 2.0
def create_test_iterator(hparams, mode, trie_excludes=None):
  """Create test iterator."""
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant([hparams.eos, "a", "b", "c", "d"]))
  tgt_vocab_mapping = tf.constant([hparams.sos, hparams.eos, "a", "b", "c"])
  tgt_vocab_table = lookup_ops.index_table_from_tensor(tgt_vocab_mapping)
  reverse_tgt_vocab_table = lookup_ops.index_to_string_table_from_tensor(
      tgt_vocab_mapping)

  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["a a b b c", "a b b"]))

  ctx_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c b c b a", "b c b a"]))

  trie_excludes = trie_excludes or []
  trie_excludes = " {} ".format(hparams.eos).join(trie_excludes)
  tex_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant([trie_excludes, trie_excludes]))

  if mode != tf.contrib.learn.ModeKeys.INFER:
    tgt_dataset = tf.data.Dataset.from_tensor_slices(
        tf.constant(["a b c b c", "a b c b"]))
    return (iterator_utils.get_iterator(
        hparams=hparams,
        src_dataset=src_dataset,
        tgt_dataset=tgt_dataset,
        ctx_dataset=ctx_dataset,
        annot_dataset=None,
        src_vocab_table=src_vocab_table,
        tgt_vocab_table=tgt_vocab_table,
        batch_size=hparams.batch_size,
        sos=hparams.sos,
        eos=hparams.eos,
        random_seed=hparams.random_seed,
        num_buckets=hparams.num_buckets), src_vocab_table, tgt_vocab_table,
            reverse_tgt_vocab_table)
  else:
    return (iterator_utils.get_infer_iterator(
        hparams=hparams,
        src_dataset=src_dataset,
        ctx_dataset=ctx_dataset,
        annot_dataset=None,
        trie_exclude_dataset=tex_dataset,
        src_vocab_table=src_vocab_table,
        tgt_vocab_table=tgt_vocab_table,
        eos=hparams.eos,
        batch_size=hparams.batch_size), src_vocab_table, tgt_vocab_table,
            reverse_tgt_vocab_table)
Example #27
Source File: iterator_utils_test.py From active-qa with Apache License 2.0
def testGetInferIterator(self):
  src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c a", "c a", "d", "f e a g"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      eos="eos",
      sos="sos",
      context_feed="")
  batch_size = 2
  src_max_len = 3
  iterator = iterator_utils.get_infer_iterator(
      hparams=hparams,
      src_dataset=src_dataset,
      src_vocab_table=src_vocab_table,
      batch_size=batch_size,
      eos=hparams.eos,
      src_max_len=src_max_len)
  table_initializer = tf.tables_initializer()
  source = iterator.source
  seq_len = iterator.source_sequence_length
  self.assertEqual([None, None], source.shape.as_list())
  self.assertEqual([None], seq_len.shape.as_list())
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)

    (source_v, seq_len_v) = sess.run((source, seq_len))
    self.assertAllEqual(
        [[2, 2, 0],   # c c a
         [2, 0, 3]],  # c a eos
        source_v)
    self.assertAllEqual([3, 2], seq_len_v)

    (source_v, seq_len_v) = sess.run((source, seq_len))
    self.assertAllEqual(
        [[-1, 3, 3],   # "d" == unknown, eos eos
         [-1, -1, 0]], # "f" == unknown, "e" == unknown, a
        source_v)
    self.assertAllEqual([1, 3], seq_len_v)

    with self.assertRaisesOpError("End of sequence"):
      sess.run((source, seq_len))
Example #28
Source File: iterator_utils_test.py From training_results_v0.5 with Apache License 2.0
def testGetIterator(self):
  tf.set_random_seed(1)
  tgt_vocab_table = src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["f e a g", "c c a", "d", "c a"]))
  tgt_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c", "a b", "", "b c"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      num_buckets=5,
      eos="eos",
      sos="sos")
  batch_size = 2
  src_max_len = 3
  dataset = iterator_utils.get_iterator(
      src_dataset=src_dataset,
      tgt_dataset=tgt_dataset,
      src_vocab_table=src_vocab_table,
      tgt_vocab_table=tgt_vocab_table,
      batch_size=batch_size,
      sos=hparams.sos,
      eos=hparams.eos,
      random_seed=hparams.random_seed,
      num_buckets=hparams.num_buckets,
      src_max_len=src_max_len,
      reshuffle_each_iteration=False)
  table_initializer = tf.tables_initializer()
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)
    features = sess.run(get_next)
    self.assertAllEqual(
        [[2, 0, 3],   # c a eos -- eos is padding
         [2, 2, 0]],  # c c a
        features["source"])
    self.assertAllEqual([2, 3], features["source_sequence_length"])
    self.assertAllEqual(
        [[4, 1, 2],   # sos b c
         [4, 0, 1]],  # sos a b
        features["target_input"])
    self.assertAllEqual(
        [[1, 2, 3],   # b c eos
         [0, 1, 3]],  # a b eos
        features["target_output"])
    self.assertAllEqual([3, 3], features["target_sequence_length"])
Example #29
Source File: iterator_utils_test.py From training_results_v0.5 with Apache License 2.0
def testGetIteratorWithShard(self):
  tf.set_random_seed(1)
  tgt_vocab_table = src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c a", "f e a g", "d", "c a"]))
  tgt_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["a b", "c c", "", "b c"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      num_buckets=5,
      eos="eos",
      sos="sos")
  batch_size = 2
  src_max_len = 3
  dataset = iterator_utils.get_iterator(
      src_dataset=src_dataset,
      tgt_dataset=tgt_dataset,
      src_vocab_table=src_vocab_table,
      tgt_vocab_table=tgt_vocab_table,
      batch_size=batch_size,
      sos=hparams.sos,
      eos=hparams.eos,
      random_seed=hparams.random_seed,
      num_buckets=hparams.num_buckets,
      src_max_len=src_max_len,
      num_shards=2,
      shard_index=1,
      reshuffle_each_iteration=False)
  table_initializer = tf.tables_initializer()
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)
    features = sess.run(get_next)
    self.assertAllEqual(
        [[2, 0, 3],    # c a eos -- eos is padding
         [-1, -1, 0]], # "f" == unknown, "e" == unknown, a
        features["source"])
    self.assertAllEqual([2, 3], features["source_sequence_length"])
    self.assertAllEqual(
        [[4, 1, 2],   # sos b c
         [4, 2, 2]],  # sos c c
        features["target_input"])
    self.assertAllEqual(
        [[1, 2, 3],   # b c eos
         [2, 2, 3]],  # c c eos
        features["target_output"])
    self.assertAllEqual([3, 3], features["target_sequence_length"])
Example #30
Source File: iterator_utils_test.py From training_results_v0.5 with Apache License 2.0
def testGetIterator(self):
  tf.set_random_seed(1)
  tgt_vocab_table = src_vocab_table = lookup_ops.index_table_from_tensor(
      tf.constant(["a", "b", "c", "eos", "sos"]))
  src_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["f e a g", "c c a", "d", "c a"]))
  tgt_dataset = tf.data.Dataset.from_tensor_slices(
      tf.constant(["c c", "a b", "", "b c"]))
  hparams = tf.contrib.training.HParams(
      random_seed=3,
      num_buckets=5,
      eos="eos",
      sos="sos")
  batch_size = 2
  src_max_len = 3
  dataset = iterator_utils.get_iterator(
      src_dataset=src_dataset,
      tgt_dataset=tgt_dataset,
      src_vocab_table=src_vocab_table,
      tgt_vocab_table=tgt_vocab_table,
      batch_size=batch_size,
      sos=hparams.sos,
      eos=hparams.eos,
      random_seed=hparams.random_seed,
      num_buckets=hparams.num_buckets,
      src_max_len=src_max_len,
      reshuffle_each_iteration=False)
  table_initializer = tf.tables_initializer()
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    sess.run(table_initializer)
    sess.run(iterator.initializer)
    features = sess.run(get_next)
    self.assertAllEqual(
        [[-1, -1, 0],  # "f" == unknown, "e" == unknown, a
         [2, 0, 3]],   # c a eos -- eos is padding
        features["source"])
    self.assertAllEqual([3, 2], features["source_sequence_length"])
    self.assertAllEqual(
        [[4, 2, 2],   # sos c c
         [4, 1, 2]],  # sos b c
        features["target_input"])
    self.assertAllEqual(
        [[2, 2, 3],   # c c eos
         [1, 2, 3]],  # b c eos
        features["target_output"])
    self.assertAllEqual([3, 3], features["target_sequence_length"])

    with self.assertRaisesOpError("End of sequence"):
      sess.run(get_next)