Python tensorflow.squared_difference() Examples
The following are 30 code examples of tensorflow.squared_difference(), collected from open-source projects. The originating project, source file, and license are noted above each example.
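Before the project examples, a minimal standalone sketch (TF 1.x graph mode, with made-up values) showing what the op does: tf.squared_difference(x, y) computes (x - y) squared element-wise, equivalent to tf.square(x - y). In TF 2.x the same op lives under tf.math.squared_difference.

import tensorflow as tf

x = tf.constant([1.0, 2.0, 3.0])
y = tf.constant([3.0, 2.0, 1.0])
diff = tf.squared_difference(x, y)  # same result as tf.square(x - y)

with tf.Session() as sess:
  print(sess.run(diff))  # [4. 0. 4.]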
Example #1
Source File: preprocessor_test.py From yolo_v2 with Apache License 2.0 | 6 votes |
def testRandomVerticalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_vertical_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterUpDownFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #2
Source File: cost.py From LapSRN-tensorflow with Apache License 2.0 | 6 votes |
def mean_squared_error(output, target, is_mean=False):
  """Return the TensorFlow expression of mean-square-error of two distributions.

  Parameters
  ----------
  output : 2D or 4D tensor.
  target : 2D or 4D tensor.
  is_mean : boolean, if True, use ``tf.reduce_mean`` to compute the loss of
      one data, otherwise, use ``tf.reduce_sum`` (default).

  References
  ------------
  - `Wiki Mean Squared Error <https://en.wikipedia.org/wiki/Mean_squared_error>`_
  """
  with tf.name_scope("mean_squared_error_loss"):
    if output.get_shape().ndims == 2:  # [batch_size, n_feature]
      if is_mean:
        mse = tf.reduce_mean(tf.reduce_mean(tf.squared_difference(output, target), 1))
      else:
        mse = tf.reduce_mean(tf.reduce_sum(tf.squared_difference(output, target), 1))
    elif output.get_shape().ndims == 4:  # [batch_size, w, h, c]
      if is_mean:
        mse = tf.reduce_mean(tf.reduce_mean(tf.squared_difference(output, target), [1, 2, 3]))
      else:
        mse = tf.reduce_mean(tf.reduce_sum(tf.squared_difference(output, target), [1, 2, 3]))
    return mse
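A hypothetical call of the helper above, assuming TF 1.x placeholders; the tensor names and shapes are illustrative and not from the original project:

y_pred = tf.placeholder(tf.float32, shape=[None, 10])
y_true = tf.placeholder(tf.float32, shape=[None, 10])
loss = mean_squared_error(y_pred, y_true, is_mean=True)  # scalar MSE over the batch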
Example #3
Source File: cost.py From LapSRN-tensorflow with Apache License 2.0 | 6 votes |
def normalized_mean_square_error(output, target):
  """Return the TensorFlow expression of normalized mean-square-error of two distributions.

  Parameters
  ----------
  output : 2D or 4D tensor.
  target : 2D or 4D tensor.
  """
  with tf.name_scope("mean_squared_error_loss"):
    if output.get_shape().ndims == 2:  # [batch_size, n_feature]
      nmse_a = tf.sqrt(tf.reduce_sum(tf.squared_difference(output, target), axis=1))
      nmse_b = tf.sqrt(tf.reduce_sum(tf.square(target), axis=1))
    elif output.get_shape().ndims == 4:  # [batch_size, w, h, c]
      nmse_a = tf.sqrt(tf.reduce_sum(tf.squared_difference(output, target), axis=[1, 2, 3]))
      nmse_b = tf.sqrt(tf.reduce_sum(tf.square(target), axis=[1, 2, 3]))
    nmse = tf.reduce_mean(nmse_a / nmse_b)
    return nmse
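In formula terms, the helper computes, per example, the ratio of the L2 norm of (output - target) to the L2 norm of target, then averages over the batch. An illustrative check with assumed shapes:

nmse = normalized_mean_square_error(tf.ones([4, 8]), 2.0 * tf.ones([4, 8]))
# Each example gives sqrt(8) / sqrt(32) = 0.5, so the batch mean evaluates to 0.5.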
Example #4
Source File: preprocessor_test.py From object_detector_app with MIT License | 6 votes |
def testRandomFlipBoxes(self):
  boxes = self.createTestBoxes()
  # Case where the boxes are flipped.
  boxes_expected1 = self.expectedBoxesAfterMirroring()
  # Case where the boxes are not flipped.
  boxes_expected2 = boxes
  # After elementwise multiplication, the result should be all-zero since one
  # of them is all-zero.
  boxes_diff = tf.multiply(
      tf.squared_difference(boxes, boxes_expected1),
      tf.squared_difference(boxes, boxes_expected2))
  expected_result = tf.zeros_like(boxes_diff)
  with self.test_session() as sess:
    (boxes_diff, expected_result) = sess.run([boxes_diff, expected_result])
    self.assertAllEqual(boxes_diff, expected_result)
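The multiply-of-squared-differences trick used throughout these tests can be isolated into a small sketch: the random flip yields one of two known outcomes, so at least one squared difference is all-zero and their element-wise product is all-zero either way. The tensor values below are made up for illustration:

actual = tf.constant([[0.0, 0.2, 1.0, 0.8]])              # result of a random flip
expected_flipped = tf.constant([[0.0, 0.2, 1.0, 0.8]])
expected_unflipped = tf.constant([[0.0, 0.8, 1.0, 0.2]])
either_or_check = tf.multiply(
    tf.squared_difference(actual, expected_flipped),
    tf.squared_difference(actual, expected_unflipped))
# either_or_check is all-zero whenever `actual` matches either expected tensor.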
Example #5
Source File: preprocessor_test.py From Traffic-Rule-Violation-Detection-System with MIT License | 6 votes |
def testRandomVerticalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_vertical_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterUpDownFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #6
Source File: preprocessor_test.py From Traffic-Rule-Violation-Detection-System with MIT License | 6 votes |
def testRandomHorizontalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterLeftRightFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #7
Source File: cycle_gan.py From fine-lm with MIT License | 6 votes |
def lossfn(real_input, fake_input, compress, hparams, lsgan, name):
  """Loss function."""
  eps = 1e-12
  with tf.variable_scope(name):
    d1 = discriminator(real_input, compress, hparams, "discriminator")
    d2 = discriminator(fake_input, compress, hparams, "discriminator",
                       reuse=True)
    if lsgan:
      dloss = tf.reduce_mean(
          tf.squared_difference(d1, 0.9)) + tf.reduce_mean(tf.square(d2))
      gloss = tf.reduce_mean(tf.squared_difference(d2, 0.9))
      loss = (dloss + gloss) / 2
    else:  # cross_entropy
      dloss = -tf.reduce_mean(
          tf.log(d1 + eps)) - tf.reduce_mean(tf.log(1 - d2 + eps))
      gloss = -tf.reduce_mean(tf.log(d2 + eps))
      loss = (dloss + gloss) / 2
    return loss
Example #8
Source File: preprocessor_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testRandomHorizontalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterLeftRightFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #9
Source File: preprocessor_test.py From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License | 6 votes |
def testRandomFlipBoxes(self):
  boxes = self.createTestBoxes()
  # Case where the boxes are flipped.
  boxes_expected1 = self.expectedBoxesAfterMirroring()
  # Case where the boxes are not flipped.
  boxes_expected2 = boxes
  # After elementwise multiplication, the result should be all-zero since one
  # of them is all-zero.
  boxes_diff = tf.multiply(
      tf.squared_difference(boxes, boxes_expected1),
      tf.squared_difference(boxes, boxes_expected2))
  expected_result = tf.zeros_like(boxes_diff)
  with self.test_session() as sess:
    (boxes_diff, expected_result) = sess.run([boxes_diff, expected_result])
    self.assertAllEqual(boxes_diff, expected_result)
Example #10
Source File: cycle_gan.py From training_results_v0.5 with Apache License 2.0 | 6 votes |
def lossfn(real_input, fake_input, compress, hparams, lsgan, name):
  """Loss function."""
  eps = 1e-12
  with tf.variable_scope(name):
    d1 = discriminator(real_input, compress, hparams, "discriminator")
    d2 = discriminator(fake_input, compress, hparams, "discriminator",
                       reuse=True)
    if lsgan:
      dloss = tf.reduce_mean(
          tf.squared_difference(d1, 0.9)) + tf.reduce_mean(tf.square(d2))
      gloss = tf.reduce_mean(tf.squared_difference(d2, 0.9))
      loss = (dloss + gloss) / 2
    else:  # cross_entropy
      dloss = -tf.reduce_mean(
          tf.log(d1 + eps)) - tf.reduce_mean(tf.log(1 - d2 + eps))
      gloss = -tf.reduce_mean(tf.log(d2 + eps))
      loss = (dloss + gloss) / 2
    return loss
Example #11
Source File: preprocessor_test.py From DOTA_models with Apache License 2.0 | 6 votes |
def testRandomFlipBoxes(self):
  boxes = self.createTestBoxes()
  # Case where the boxes are flipped.
  boxes_expected1 = self.expectedBoxesAfterMirroring()
  # Case where the boxes are not flipped.
  boxes_expected2 = boxes
  # After elementwise multiplication, the result should be all-zero since one
  # of them is all-zero.
  boxes_diff = tf.multiply(
      tf.squared_difference(boxes, boxes_expected1),
      tf.squared_difference(boxes, boxes_expected2))
  expected_result = tf.zeros_like(boxes_diff)
  with self.test_session() as sess:
    (boxes_diff, expected_result) = sess.run([boxes_diff, expected_result])
    self.assertAllEqual(boxes_diff, expected_result)
Example #12
Source File: preprocessor_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testRandomVerticalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_vertical_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterUpDownFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #13
Source File: preprocessor_test.py From yolo_v2 with Apache License 2.0 | 6 votes |
def testRandomHorizontalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterLeftRightFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #14
Source File: preprocessor_test.py From ros_people_object_detection_tensorflow with Apache License 2.0 | 6 votes |
def testRandomHorizontalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterLeftRightFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #15
Source File: preprocessor_test.py From ros_people_object_detection_tensorflow with Apache License 2.0 | 6 votes |
def testRandomVerticalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_vertical_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterUpDownFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #16
Source File: preprocessor_test.py From HereIsWally with MIT License | 6 votes |
def testRandomFlipBoxes(self):
  boxes = self.createTestBoxes()
  # Case where the boxes are flipped.
  boxes_expected1 = self.expectedBoxesAfterMirroring()
  # Case where the boxes are not flipped.
  boxes_expected2 = boxes
  # After elementwise multiplication, the result should be all-zero since one
  # of them is all-zero.
  boxes_diff = tf.multiply(
      tf.squared_difference(boxes, boxes_expected1),
      tf.squared_difference(boxes, boxes_expected2))
  expected_result = tf.zeros_like(boxes_diff)
  with self.test_session() as sess:
    (boxes_diff, expected_result) = sess.run([boxes_diff, expected_result])
    self.assertAllEqual(boxes_diff, expected_result)
Example #17
Source File: cycle_gan.py From BERT with Apache License 2.0 | 6 votes |
def lossfn(real_input, fake_input, compress, hparams, lsgan, name):
  """Loss function."""
  eps = 1e-12
  with tf.variable_scope(name):
    d1 = discriminator(real_input, compress, hparams, "discriminator")
    d2 = discriminator(fake_input, compress, hparams, "discriminator",
                       reuse=True)
    if lsgan:
      dloss = tf.reduce_mean(
          tf.squared_difference(d1, 0.9)) + tf.reduce_mean(tf.square(d2))
      gloss = tf.reduce_mean(tf.squared_difference(d2, 0.9))
      loss = (dloss + gloss) / 2
    else:  # cross_entropy
      dloss = -tf.reduce_mean(
          tf.log(d1 + eps)) - tf.reduce_mean(tf.log1p(eps - d2))
      gloss = -tf.reduce_mean(tf.log(d2 + eps))
      loss = (dloss + gloss) / 2
    return loss
Example #18
Source File: preprocessor_test.py From garbage-object-detection-tensorflow with MIT License | 6 votes |
def testRandomFlipBoxes(self):
  boxes = self.createTestBoxes()
  # Case where the boxes are flipped.
  boxes_expected1 = self.expectedBoxesAfterMirroring()
  # Case where the boxes are not flipped.
  boxes_expected2 = boxes
  # After elementwise multiplication, the result should be all-zero since one
  # of them is all-zero.
  boxes_diff = tf.multiply(
      tf.squared_difference(boxes, boxes_expected1),
      tf.squared_difference(boxes, boxes_expected2))
  expected_result = tf.zeros_like(boxes_diff)
  with self.test_session() as sess:
    (boxes_diff, expected_result) = sess.run([boxes_diff, expected_result])
    self.assertAllEqual(boxes_diff, expected_result)
Example #19
Source File: common_layers.py From BERT with Apache License 2.0 | 6 votes |
def layer_norm_compute(x, epsilon, scale, bias, layer_collection=None):
  """Layer norm raw computation."""
  # Save these before they get converted to tensors by the casting below
  params = (scale, bias)

  epsilon, scale, bias = [cast_like(t, x) for t in [epsilon, scale, bias]]
  mean = tf.reduce_mean(x, axis=[-1], keepdims=True)
  variance = tf.reduce_mean(
      tf.squared_difference(x, mean), axis=[-1], keepdims=True)
  norm_x = (x - mean) * tf.rsqrt(variance + epsilon)

  output = norm_x * scale + bias
  return output
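The function above is the standard layer-normalization computation: mean and variance are taken over the last axis, with the variance obtained from tf.squared_difference(x, mean), then the normalized activations are rescaled and shifted. A rough equivalence check using tf.nn.moments; shapes and values are assumptions, not from the original project:

x = tf.random_normal([2, 8])
mean, variance = tf.nn.moments(x, axes=[1], keep_dims=True)
# `variance` matches tf.reduce_mean(tf.squared_difference(x, mean),
#                                   axis=[-1], keepdims=True) from the snippet above.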
Example #20
Source File: vq_discrete.py From BERT with Apache License 2.0 | 6 votes |
def embedding_lookup(self, x, means):
  """Compute nearest neighbors and loss for training the embeddings.

  Args:
    x: Batch of encoder continuous latent states sliced/projected into
      shape [-1, num_blocks, block_dim].
    means: Embedding means.

  Returns:
    The nearest neighbor in one hot form, the nearest neighbor itself, the
    commitment loss, embedding training loss.
  """
  x_means_hot = self.nearest_neighbor(x, means)
  x_means_hot_flat = tf.reshape(
      x_means_hot, [-1, self.hparams.num_blocks, self.hparams.block_v_size])
  x_means = tf.matmul(tf.transpose(x_means_hot_flat, perm=[1, 0, 2]), means)
  x_means = tf.transpose(x_means, [1, 0, 2])
  q_loss = tf.reduce_mean(
      tf.squared_difference(tf.stop_gradient(x), x_means))
  e_loss = tf.reduce_mean(
      tf.squared_difference(x, tf.stop_gradient(x_means)))
  return x_means_hot, x_means, q_loss, e_loss
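The two losses follow the usual VQ-VAE recipe: tf.stop_gradient freezes one argument, so q_loss only moves the codebook means toward the encoder output, while e_loss (the commitment loss) only moves the encoder output toward its assigned mean. A minimal sketch of that pattern with made-up shapes (not the project's API):

x = tf.random_normal([16, 4])                    # stand-in for encoder outputs
codebook = tf.get_variable("codebook", [16, 4])  # stand-in for the embedding means
q_loss = tf.reduce_mean(tf.squared_difference(tf.stop_gradient(x), codebook))
e_loss = tf.reduce_mean(tf.squared_difference(x, tf.stop_gradient(codebook)))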
Example #21
Source File: preprocessor_test.py From cartoonify with MIT License | 6 votes |
def testRandomVerticalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_vertical_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterUpDownFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #22
Source File: preprocessor_test.py From Person-Detection-and-Tracking with MIT License | 6 votes |
def testRandomHorizontalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterLeftRightFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #23
Source File: preprocessor_test.py From cartoonify with MIT License | 6 votes |
def testRandomHorizontalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterLeftRightFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #24
Source File: preprocessor_test.py From Person-Detection-and-Tracking with MIT License | 6 votes |
def testRandomVerticalFlipWithEmptyBoxes(self):
  preprocess_options = [(preprocessor.random_vertical_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createEmptyTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterUpDownFlip()
  boxes_expected = self.createEmptyTestBoxes()
  images_expected2 = images
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_,
     boxes_expected_) = sess.run([images_diff, images_diff_expected, boxes,
                                  boxes_expected])
    self.assertAllClose(boxes_, boxes_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #25
Source File: preprocessor_test.py From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License | 5 votes |
def testRandomHorizontalFlip(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterMirroring()
  boxes_expected1 = self.expectedBoxesAfterMirroring()
  images_expected2 = images
  boxes_expected2 = boxes
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  boxes_diff1 = tf.squared_difference(boxes, boxes_expected1)
  boxes_diff2 = tf.squared_difference(boxes, boxes_expected2)
  boxes_diff = tf.multiply(boxes_diff1, boxes_diff2)
  boxes_diff_expected = tf.zeros_like(boxes_diff)
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_diff_,
     boxes_diff_expected_) = sess.run([images_diff, images_diff_expected,
                                       boxes_diff, boxes_diff_expected])
    self.assertAllClose(boxes_diff_, boxes_diff_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #26
Source File: preprocessor_test.py From cartoonify with MIT License | 5 votes |
def testRandomHorizontalFlip(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterLeftRightFlip()
  boxes_expected1 = self.expectedBoxesAfterLeftRightFlip()
  images_expected2 = images
  boxes_expected2 = boxes
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  boxes_diff1 = tf.squared_difference(boxes, boxes_expected1)
  boxes_diff2 = tf.squared_difference(boxes, boxes_expected2)
  boxes_diff = tf.multiply(boxes_diff1, boxes_diff2)
  boxes_diff_expected = tf.zeros_like(boxes_diff)
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_diff_,
     boxes_diff_expected_) = sess.run([images_diff, images_diff_expected,
                                       boxes_diff, boxes_diff_expected])
    self.assertAllClose(boxes_diff_, boxes_diff_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #27
Source File: preprocessor_test.py From cartoonify with MIT License | 5 votes |
def testRandomVerticalFlip(self):
  preprocess_options = [(preprocessor.random_vertical_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterUpDownFlip()
  boxes_expected1 = self.expectedBoxesAfterUpDownFlip()
  images_expected2 = images
  boxes_expected2 = boxes
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  boxes_diff1 = tf.squared_difference(boxes, boxes_expected1)
  boxes_diff2 = tf.squared_difference(boxes, boxes_expected2)
  boxes_diff = tf.multiply(boxes_diff1, boxes_diff2)
  boxes_diff_expected = tf.zeros_like(boxes_diff)
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_diff_,
     boxes_diff_expected_) = sess.run([images_diff, images_diff_expected,
                                       boxes_diff, boxes_diff_expected])
    self.assertAllClose(boxes_diff_, boxes_diff_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #28
Source File: ops.py From tfdeploy with MIT License | 5 votes |
def test_SquaredDifference(self):
  t = tf.squared_difference(*self.random((3, 4, 4), (3, 4, 4)))
  self.check(t)
Example #29
Source File: preprocessor_test.py From garbage-object-detection-tensorflow with MIT License | 5 votes |
def testRandomHorizontalFlip(self):
  preprocess_options = [(preprocessor.random_horizontal_flip, {})]
  images = self.expectedImagesAfterNormalization()
  boxes = self.createTestBoxes()
  tensor_dict = {fields.InputDataFields.image: images,
                 fields.InputDataFields.groundtruth_boxes: boxes}
  images_expected1 = self.expectedImagesAfterMirroring()
  boxes_expected1 = self.expectedBoxesAfterMirroring()
  images_expected2 = images
  boxes_expected2 = boxes
  tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
  images = tensor_dict[fields.InputDataFields.image]
  boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
  boxes_diff1 = tf.squared_difference(boxes, boxes_expected1)
  boxes_diff2 = tf.squared_difference(boxes, boxes_expected2)
  boxes_diff = tf.multiply(boxes_diff1, boxes_diff2)
  boxes_diff_expected = tf.zeros_like(boxes_diff)
  images_diff1 = tf.squared_difference(images, images_expected1)
  images_diff2 = tf.squared_difference(images, images_expected2)
  images_diff = tf.multiply(images_diff1, images_diff2)
  images_diff_expected = tf.zeros_like(images_diff)
  with self.test_session() as sess:
    (images_diff_, images_diff_expected_, boxes_diff_,
     boxes_diff_expected_) = sess.run([images_diff, images_diff_expected,
                                       boxes_diff, boxes_diff_expected])
    self.assertAllClose(boxes_diff_, boxes_diff_expected_)
    self.assertAllClose(images_diff_, images_diff_expected_)
Example #30
Source File: layers.py From neural_rerendering_in_the_wild with Apache License 2.0 | 5 votes |
def minibatch_mean_variance(x):
  """Computes the variance average.

  This is used by the discriminator as a form of batch discrimination.

  Args:
    x: nD tensor for which to compute variance average.

  Returns:
    a scalar, the mean variance of variable x.
  """
  mean = tf.reduce_mean(x, 0, keepdims=True)
  vals = tf.sqrt(tf.reduce_mean(tf.squared_difference(x, mean), 0) + 1e-8)
  vals = tf.reduce_mean(vals)
  return vals