Python tensorflow.contrib.slim.one_hot_encoding() Examples

The following are 30 code examples of tensorflow.contrib.slim.one_hot_encoding(). Each example is taken from the project and source file noted above it. You may also want to check out all available functions/classes of the module tensorflow.contrib.slim.
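For reference, here is a minimal standalone sketch of how slim.one_hot_encoding() is typically called. It assumes TensorFlow 1.x (where tf.contrib is still available); the label values are only for illustration.

import tensorflow as tf

slim = tf.contrib.slim

# Three integer class labels out of num_classes=4.
labels = tf.constant([0, 2, 3])
# Produces a float32 tensor of shape [3, 4] with 1.0 at the label index
# and 0.0 elsewhere; roughly equivalent to tf.one_hot(labels, 4).
one_hot = slim.one_hot_encoding(labels, num_classes=4)

with tf.Session() as sess:
    print(sess.run(one_hot))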
Example #1
Source File: model_test.py    From object_detection_kitti with Apache License 2.0
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #2
Source File: model_test.py    From multilabel-image-classification-tensorflow with MIT License
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #3
Source File: model.py    From DOTA_models with Apache License 2.0
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, dimension=2), name='predicted_chars')
    mask = tf.cast(
        slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
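The pattern above (argmax over the logits, one-hot encode the predicted ids, then boolean_mask the softmax scores) can be exercised in isolation. A minimal sketch, assuming TF 1.x and hypothetical shapes batch_size=2, seq_length=3, num_char_classes=4:

import tensorflow as tf

slim = tf.contrib.slim

chars_logit = tf.random_normal([2, 3, 4])
ids = tf.to_int32(tf.argmax(chars_logit, axis=2))                # [2, 3]
# One-hot mask selects exactly one class per (batch, step) position.
mask = tf.cast(slim.one_hot_encoding(ids, 4), tf.bool)           # [2, 3, 4]
all_scores = tf.nn.softmax(chars_logit)                          # [2, 3, 4]
# boolean_mask flattens in row-major order, one kept value per position.
scores = tf.reshape(tf.boolean_mask(all_scores, mask), [-1, 3])  # [2, 3]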
Example #4
Source File: model.py    From multilabel-image-classification-tensorflow with MIT License
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, axis=2), name='predicted_chars')
    mask = tf.cast(
      slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #5
Source File: model_test.py    From yolo_v2 with Apache License 2.0
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #6
Source File: tf_module.py    From LaneSegmentationNetwork with GNU Lesser General Public License v3.0
def class_and_spatial_loss(logits, onehot_labels, weights, weights2):
    logits_shape = tf.shape(logits)
    onehot_labels_shape = tf.shape(onehot_labels)
    image_labels = tf.reshape(onehot_labels, logits_shape)
    class_loss = tf.losses.softmax_cross_entropy(
        onehot_labels=onehot_labels,
        logits=tf.reshape(logits, [-1, onehot_labels_shape[-1]]),
        weights=weights * weights2
    )

    image_weights = tf.reshape(weights, [logits_shape[0], logits_shape[1], logits_shape[2], 1])
    predict_class = tf.argmax(logits, axis=3)
    predict_class = slim.one_hot_encoding(predict_class, onehot_labels_shape[-1], 1.0, 0.0)
    union = to_float(to_bool(predict_class + image_labels)) * image_weights
    intersection = to_float(tf.logical_and(to_bool(predict_class), to_bool(image_labels))) * image_weights
    label_on = to_float(tf.greater(tf.reduce_sum(image_labels, axis=[1, 2]), 0))
    spatial_loss = ((tf.reduce_sum(intersection, axis=[1, 2]) + 1) / (tf.reduce_sum(union, axis=[1, 2]) + 1))
    spatial_loss = tf.reduce_mean(-tf.log(spatial_loss) * label_on)

    return class_loss + spatial_loss 
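The spatial term above is a smoothed IoU between the one-hot predicted classes and the one-hot labels. A rough sketch of that idea, with plain tf.cast in place of the repo's to_bool/to_float helpers and the label_on weighting omitted (hypothetical shapes, TF 1.x):

import tensorflow as tf

slim = tf.contrib.slim

logits = tf.random_normal([1, 4, 4, 3])                        # batch, h, w, classes
labels = tf.random_uniform([1, 4, 4], maxval=3, dtype=tf.int32)
onehot_labels = slim.one_hot_encoding(labels, 3)

predict_class = slim.one_hot_encoding(tf.argmax(logits, axis=3), 3, 1.0, 0.0)
pred_b = tf.cast(predict_class, tf.bool)
label_b = tf.cast(onehot_labels, tf.bool)
union = tf.cast(tf.logical_or(pred_b, label_b), tf.float32)
intersection = tf.cast(tf.logical_and(pred_b, label_b), tf.float32)
# Per-image, per-class IoU, smoothed with +1 so empty classes do not divide by zero.
iou = (tf.reduce_sum(intersection, axis=[1, 2]) + 1) / (tf.reduce_sum(union, axis=[1, 2]) + 1)
spatial_loss = tf.reduce_mean(-tf.log(iou))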
Example #7
Source File: model.py    From yolo_v2 with Apache License 2.0
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, axis=2), name='predicted_chars')
    mask = tf.cast(
      slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #8
Source File: model.py    From models with Apache License 2.0
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, axis=2), name='predicted_chars')
    mask = tf.cast(
      slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #9
Source File: model_test.py    From Gun-Detector with Apache License 2.0
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #10
Source File: model_test.py    From models with Apache License 2.0
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #11
Source File: model.py    From Gun-Detector with Apache License 2.0
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, axis=2), name='predicted_chars')
    mask = tf.cast(
      slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #12
Source File: model_test.py    From g-tensorflow-models with Apache License 2.0
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #13
Source File: classify.py    From mayo with MIT License
def _top(self, prediction, truth, num_tops=1):
        # a full sort using top_k
        values, indices = tf.nn.top_k(prediction, self.num_classes)
        # cut-off threshold
        thresholds = values[:, (num_tops - 1):num_tops]
        # if > threshold, weight = 1, else weight = 0
        valids = tf.cast(prediction > thresholds, tf.float32)
        # ties should have weight = 1 / num_ties
        ties = tf.equal(prediction, thresholds)
        num_ties = tf.reduce_sum(
            tf.cast(ties, tf.float32), axis=-1, keepdims=True)
        num_ties = tf.py_func(
            self._warn_ties, [ties, num_ties, thresholds],
            tf.float32, stateful=False)
        num_ties = tf.tile(num_ties, [1, self.num_classes])
        weights = tf.where(ties, 1 / num_ties, valids)
        return slim.one_hot_encoding(truth, self.num_classes) * weights 
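Here the one-hot ground truth is multiplied by per-class weights so that summing over classes gives a tie-aware top-k hit per example. A simplified sketch of the same idea without the tie handling, assuming TF 1.x and hypothetical shapes:

import tensorflow as tf

slim = tf.contrib.slim

num_classes = 5
prediction = tf.nn.softmax(tf.random_normal([4, num_classes]))
truth = tf.constant([1, 0, 3, 2])
num_tops = 1

values, _ = tf.nn.top_k(prediction, num_classes)           # full sort
threshold = values[:, (num_tops - 1):num_tops]             # k-th largest score
valids = tf.cast(prediction >= threshold, tf.float32)      # 1 for every top-k class
# Keeping the weight only at the true class makes the row sum a top-k hit (0 or 1).
hits = tf.reduce_sum(slim.one_hot_encoding(truth, num_classes) * valids, axis=1)
top_k_accuracy = tf.reduce_mean(hits)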
Example #14
Source File: model.py    From g-tensorflow-models with Apache License 2.0
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, axis=2), name='predicted_chars')
    mask = tf.cast(
      slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #15
Source File: model.py    From object_detection_with_tensorflow with MIT License
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, axis=2), name='predicted_chars')
    mask = tf.cast(
        slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #16
Source File: detnet.py    From social-scene-understanding with GNU General Public License v3.0
def det_net_loss(seg_masks_in, reg_masks_in,
                 seg_preds, reg_preds,
                 reg_loss_weight=10.0,
                 epsilon=1e-5):

  with tf.variable_scope('loss'):
    out_size = seg_preds.get_shape()[1:3]
    seg_masks_in_ds = tf.image.resize_images(seg_masks_in[:,:,:,tf.newaxis],
                                             out_size[0], out_size[1],
                                             tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    reg_masks_in_ds = tf.image.resize_images(reg_masks_in,
                                             out_size[0], out_size[1],
                                             tf.image.ResizeMethod.NEAREST_NEIGHBOR)

    # segmentation loss
    seg_masks_onehot = slim.one_hot_encoding(seg_masks_in_ds[:,:,:,0], 2)
    seg_loss = - tf.reduce_mean(seg_masks_onehot * tf.log(seg_preds + epsilon))

    # regression loss
    mask = tf.to_float(seg_masks_in_ds)
    reg_loss = tf.reduce_sum(mask * (reg_preds - reg_masks_in_ds)**2)
    reg_loss = reg_loss / (tf.reduce_sum(mask) + 1.0)

  return seg_loss + reg_loss_weight * reg_loss 
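The segmentation term one-hot encodes the ground-truth mask so it lines up channel-wise with the softmax predictions. A minimal sketch of just that term, assuming TF 1.x and hypothetical 8x8 binary masks:

import tensorflow as tf

slim = tf.contrib.slim

epsilon = 1e-5
seg_masks = tf.random_uniform([2, 8, 8], maxval=2, dtype=tf.int32)   # 0/1 labels
seg_preds = tf.nn.softmax(tf.random_normal([2, 8, 8, 2]))            # per-pixel probs

# [2, 8, 8, 2] one-hot target; the loss is the mean NLL of the true channel.
seg_masks_onehot = slim.one_hot_encoding(seg_masks, 2)
seg_loss = -tf.reduce_mean(seg_masks_onehot * tf.log(seg_preds + epsilon))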
Example #17
Source File: model_test.py    From DOTA_models with Apache License 2.0
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #18
Source File: model.py    From object_detection_kitti with Apache License 2.0
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, dimension=2), name='predicted_chars')
    mask = tf.cast(
        slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #19
Source File: model_test.py    From hands-detection with MIT License
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #20
Source File: model.py    From hands-detection with MIT License
def char_predictions(self, chars_logit):
    """Returns confidence scores (softmax values) for predicted characters.

    Args:
      chars_logit: chars logits, a tensor with shape
        [batch_size x seq_length x num_char_classes]

    Returns:
      A tuple (ids, log_prob, scores), where:
        ids - predicted characters, an int32 tensor with shape
          [batch_size x seq_length];
        log_prob - log probabilities of all characters, a float tensor with
          shape [batch_size, seq_length, num_char_classes];
        scores - corresponding confidence scores for characters, a float
          tensor with shape [batch_size x seq_length].
    """
    log_prob = utils.logits_to_log_prob(chars_logit)
    ids = tf.to_int32(tf.argmax(log_prob, dimension=2), name='predicted_chars')
    mask = tf.cast(
        slim.one_hot_encoding(ids, self._params.num_char_classes), tf.bool)
    all_scores = tf.nn.softmax(chars_logit)
    selected_scores = tf.boolean_mask(all_scores, mask, name='char_scores')
    scores = tf.reshape(selected_scores, shape=(-1, self._params.seq_length))
    return ids, log_prob, scores 
Example #21
Source File: model_test.py    From object_detection_with_tensorflow with MIT License
def test_create_summaries_is_runnable(self):
    ocr_model = self.create_model()
    data = data_provider.InputEndpoints(
        images=self.fake_images,
        images_orig=self.fake_images,
        labels=self.fake_labels,
        labels_one_hot=slim.one_hot_encoding(self.fake_labels,
                                             self.num_char_classes))
    endpoints = ocr_model.create_base(
        images=self.fake_images, labels_one_hot=None)
    charset = create_fake_charset(self.num_char_classes)
    summaries = ocr_model.create_summaries(
        data, endpoints, charset, is_training=False)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      sess.run(tf.local_variables_initializer())
      tf.tables_initializer().run()
      sess.run(summaries)  # just check it is runnable 
Example #22
Source File: sequence_layers_test.py    From g-tensorflow-models with Apache License 2.0
def fake_labels(batch_size, seq_length, num_char_classes):
  labels_np = tf.convert_to_tensor(
      np.random.randint(
          low=0, high=num_char_classes, size=(batch_size, seq_length)))
  return slim.one_hot_encoding(labels_np, num_classes=num_char_classes) 
Example #23
Source File: model_test.py    From multilabel-image-classification-tensorflow with MIT License
def encode_coordinates_alt(self, net):
    """An alternative implemenation for the encoding coordinates.

    Args:
      net: a tensor of shape=[batch_size, height, width, num_features]

    Returns:
      a list of tensors with encoded image coordinates in them.
    """
    batch_size, h, w, _ = net.shape.as_list()
    h_loc = [
      tf.tile(
          tf.reshape(
              tf.contrib.layers.one_hot_encoding(
                  tf.constant([i]), num_classes=h), [h, 1]), [1, w])
      for i in xrange(h)
    ]
    h_loc = tf.concat([tf.expand_dims(t, 2) for t in h_loc], 2)
    w_loc = [
      tf.tile(
          tf.contrib.layers.one_hot_encoding(tf.constant([i]), num_classes=w),
          [h, 1]) for i in xrange(w)
    ]
    w_loc = tf.concat([tf.expand_dims(t, 2) for t in w_loc], 2)
    loc = tf.concat([h_loc, w_loc], 2)
    loc = tf.tile(tf.expand_dims(loc, 0), [batch_size, 1, 1, 1])
    return tf.concat([net, loc], 3) 
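The same coordinate encoding can be written more compactly with tf.one_hot and broadcasting. A sketch under the same assumptions (fully defined static shape, TF 1.x), not the repo's implementation:

import tensorflow as tf

def encode_coordinates(net):
  """net: [batch_size, h, w, num_features] with a fully defined static shape."""
  batch_size, h, w, _ = net.shape.as_list()
  # y[r, c, :] is the one-hot row index; x[r, c, :] is the one-hot column index.
  y = tf.tile(tf.one_hot(tf.range(h), h)[:, tf.newaxis, :], [1, w, 1])   # [h, w, h]
  x = tf.tile(tf.one_hot(tf.range(w), w)[tf.newaxis, :, :], [h, 1, 1])   # [h, w, w]
  loc = tf.tile(tf.concat([y, x], 2)[tf.newaxis], [batch_size, 1, 1, 1])
  return tf.concat([net, loc], 3)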
Example #24
Source File: sequence_layers.py    From object_detection_with_tensorflow with MIT License
def char_one_hot(self, logit):
    """Creates one hot encoding for a logit of a character.

    Args:
      logit: A tensor with shape [batch_size, num_char_classes].

    Returns:
      A tensor with shape [batch_size, num_char_classes]
    """
    prediction = tf.argmax(logit, axis=1)
    return slim.one_hot_encoding(prediction, self._params.num_char_classes) 
Example #25
Source File: sequence_layers_test.py    From object_detection_kitti with Apache License 2.0
def fake_labels(batch_size, seq_length, num_char_classes):
  labels_np = tf.convert_to_tensor(
      np.random.randint(
          low=0, high=num_char_classes, size=(batch_size, seq_length)))
  return slim.one_hot_encoding(labels_np, num_classes=num_char_classes) 
Example #26
Source File: sequence_layers.py    From DOTA_models with Apache License 2.0
def char_one_hot(self, logit):
    """Creates one hot encoding for a logit of a character.

    Args:
      logit: A tensor with shape [batch_size, num_char_classes].

    Returns:
      A tensor with shape [batch_size, num_char_classes]
    """
    prediction = tf.argmax(logit, dimension=1)
    return slim.one_hot_encoding(prediction, self._params.num_char_classes) 
Example #27
Source File: model_test.py    From object_detection_kitti with Apache License 2.0
def encode_coordinates_alt(self, net):
    """An alternative implemenation for the encoding coordinates.

    Args:
      net: a tensor of shape=[batch_size, height, width, num_features]

    Returns:
      a list of tensors with encoded image coordinates in them.
    """
    batch_size, h, w, _ = net.shape.as_list()
    h_loc = [
      tf.tile(
          tf.reshape(
              tf.contrib.layers.one_hot_encoding(
                  tf.constant([i]), num_classes=h), [h, 1]), [1, w])
      for i in xrange(h)
    ]
    h_loc = tf.concat([tf.expand_dims(t, 2) for t in h_loc], 2)
    w_loc = [
      tf.tile(
          tf.contrib.layers.one_hot_encoding(tf.constant([i]), num_classes=w),
          [h, 1]) for i in xrange(w)
    ]
    w_loc = tf.concat([tf.expand_dims(t, 2) for t in w_loc], 2)
    loc = tf.concat([h_loc, w_loc], 2)
    loc = tf.tile(tf.expand_dims(loc, 0), [batch_size, 1, 1, 1])
    return tf.concat([net, loc], 3) 
Example #28
Source File: sequence_layers_test.py    From multilabel-image-classification-tensorflow with MIT License
def fake_labels(batch_size, seq_length, num_char_classes):
  labels_np = tf.convert_to_tensor(
      np.random.randint(
          low=0, high=num_char_classes, size=(batch_size, seq_length)))
  return slim.one_hot_encoding(labels_np, num_classes=num_char_classes) 
Example #29
Source File: model_test.py    From models with Apache License 2.0
def encode_coordinates_alt(self, net):
    """An alternative implemenation for the encoding coordinates.

    Args:
      net: a tensor of shape=[batch_size, height, width, num_features]

    Returns:
      a list of tensors with encoded image coordinates in them.
    """
    batch_size, h, w, _ = net.shape.as_list()
    h_loc = [
      tf.tile(
          tf.reshape(
              tf.contrib.layers.one_hot_encoding(
                  tf.constant([i]), num_classes=h), [h, 1]), [1, w])
      for i in range(h)
    ]
    h_loc = tf.concat([tf.expand_dims(t, 2) for t in h_loc], 2)
    w_loc = [
      tf.tile(
          tf.contrib.layers.one_hot_encoding(tf.constant([i]), num_classes=w),
          [h, 1]) for i in range(w)
    ]
    w_loc = tf.concat([tf.expand_dims(t, 2) for t in w_loc], 2)
    loc = tf.concat([h_loc, w_loc], 2)
    loc = tf.tile(tf.expand_dims(loc, 0), [batch_size, 1, 1, 1])
    return tf.concat([net, loc], 3) 
Example #30
Source File: sequence_layers.py    From object_detection_kitti with Apache License 2.0
def char_one_hot(self, logit):
    """Creates one hot encoding for a logit of a character.

    Args:
      logit: A tensor with shape [batch_size, num_char_classes].

    Returns:
      A tensor with shape [batch_size, num_char_classes]
    """
    prediction = tf.argmax(logit, dimension=1)
    return slim.one_hot_encoding(prediction, self._params.num_char_classes)