Python object_detection.protos.pipeline_pb2.TrainEvalPipelineConfig() Examples
The following are 30 code examples of object_detection.protos.pipeline_pb2.TrainEvalPipelineConfig(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module object_detection.protos.pipeline_pb2, or try the search function.
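Before the examples, here is a minimal sketch of the round-trip that most of them rely on: create an empty TrainEvalPipelineConfig proto, set a few fields, serialize it with protobuf text_format, and parse it back. The field values and the label map path below are illustrative only, not taken from any particular example.

from google.protobuf import text_format
from object_detection.protos import pipeline_pb2

# Build an empty pipeline proto and populate a few representative fields.
pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
pipeline_config.model.ssd.num_classes = 2
pipeline_config.train_config.batch_size = 32
pipeline_config.train_input_reader.label_map_path = "path/to/label_map.pbtxt"

# Serialize to the text format used by pipeline.config files on disk ...
config_text = text_format.MessageToString(pipeline_config)

# ... and parse the text back into a fresh proto.
restored = pipeline_pb2.TrainEvalPipelineConfig()
text_format.Merge(config_text, restored)
assert restored.train_config.batch_size == 32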
Example #1
Source File: exporter_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_graph_with_image_tensor_input(self):
  tmp_dir = self.get_temp_dir()
  trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
  self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                        use_moving_averages=False)
  with mock.patch.object(
      model_builder, 'build', autospec=True) as mock_builder:
    mock_builder.return_value = FakeModel()
    output_directory = os.path.join(tmp_dir, 'output')
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.eval_config.use_moving_averages = False
    exporter.export_inference_graph(
        input_type='image_tensor',
        pipeline_config=pipeline_config,
        trained_checkpoint_prefix=trained_checkpoint_prefix,
        output_directory=output_directory)
    self.assertTrue(os.path.exists(os.path.join(
        output_directory, 'saved_model', 'saved_model.pb')))
Example #2
Source File: train.py From object_detector_app with MIT License | 6 votes |
def get_configs_from_pipeline_file():
  """Reads training configuration from a pipeline_pb2.TrainEvalPipelineConfig.

  Reads training config from file specified by pipeline_config_path flag.

  Returns:
    model_config: model_pb2.DetectionModel
    train_config: train_pb2.TrainConfig
    input_config: input_reader_pb2.InputReader
  """
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
    text_format.Merge(f.read(), pipeline_config)

  model_config = pipeline_config.model
  train_config = pipeline_config.train_config
  input_config = pipeline_config.train_input_reader

  return model_config, train_config, input_config
Example #3
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testNewFocalLossParameters(self):
  """Tests that the loss weight ratio is updated appropriately."""
  original_alpha = 1.0
  original_gamma = 1.0
  new_alpha = 0.3
  new_gamma = 2.0
  hparams = tf.contrib.training.HParams(
      focal_loss_alpha=new_alpha, focal_loss_gamma=new_gamma)
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  classification_loss = pipeline_config.model.ssd.loss.classification_loss
  classification_loss.weighted_sigmoid_focal.alpha = original_alpha
  classification_loss.weighted_sigmoid_focal.gamma = original_gamma
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  configs = config_util.merge_external_params_with_configs(configs, hparams)
  classification_loss = configs["model"].ssd.loss.classification_loss
  self.assertAlmostEqual(new_alpha,
                         classification_loss.weighted_sigmoid_focal.alpha)
  self.assertAlmostEqual(new_gamma,
                         classification_loss.weighted_sigmoid_focal.gamma)
Example #4
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testMergingKeywordArguments(self):
  """Tests that keyword arguments get merged as do hyperparameters."""
  original_num_train_steps = 100
  desired_num_train_steps = 10
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.train_config.num_steps = original_num_train_steps
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"train_steps": desired_num_train_steps}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  train_steps = configs["train_config"].num_steps
  self.assertEqual(desired_num_train_steps, train_steps)
Example #5
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testNewTrainInputPath(self):
  """Tests that train input path can be overwritten with single file."""
  original_train_path = ["path/to/data"]
  new_train_path = "another/path/to/data"
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  reader_config = pipeline_config.train_input_reader.tf_record_input_reader
  reader_config.input_path.extend(original_train_path)
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"train_input_path": new_train_path}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  reader_config = configs["train_input_config"].tf_record_input_reader
  final_path = reader_config.input_path
  self.assertEqual([new_train_path], final_path)
Example #6
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testNewTrainInputPathList(self):
  """Tests that train input path can be overwritten with multiple files."""
  original_train_path = ["path/to/data"]
  new_train_path = ["another/path/to/data", "yet/another/path/to/data"]
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  reader_config = pipeline_config.train_input_reader.tf_record_input_reader
  reader_config.input_path.extend(original_train_path)
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"train_input_path": new_train_path}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  reader_config = configs["train_input_config"].tf_record_input_reader
  final_path = reader_config.input_path
  self.assertEqual(new_train_path, final_path)
Example #7
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testDontOverwriteEmptyLabelMapPath(self):
  """Tests that label map path will not be overwritten with empty string."""
  original_label_map_path = "path/to/original/label_map"
  new_label_map_path = ""
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  train_input_reader = pipeline_config.train_input_reader
  train_input_reader.label_map_path = original_label_map_path
  eval_input_reader = pipeline_config.eval_input_reader.add()
  eval_input_reader.label_map_path = original_label_map_path
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"label_map_path": new_label_map_path}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  self.assertEqual(original_label_map_path,
                   configs["train_input_config"].label_map_path)
  self.assertEqual(original_label_map_path,
                   configs["eval_input_configs"][0].label_map_path)
Example #8
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testNewMaskType(self):
  """Tests that mask type can be overwritten in input readers."""
  original_mask_type = input_reader_pb2.NUMERICAL_MASKS
  new_mask_type = input_reader_pb2.PNG_MASKS
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  train_input_reader = pipeline_config.train_input_reader
  train_input_reader.mask_type = original_mask_type
  eval_input_reader = pipeline_config.eval_input_reader.add()
  eval_input_reader.mask_type = original_mask_type
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"mask_type": new_mask_type}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  self.assertEqual(new_mask_type, configs["train_input_config"].mask_type)
  self.assertEqual(new_mask_type, configs["eval_input_configs"][0].mask_type)
Example #9
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testTrainShuffle(self):
  """Tests that `train_shuffle` keyword arguments are applied correctly."""
  original_shuffle = True
  desired_shuffle = False
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.train_input_reader.shuffle = original_shuffle
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"train_shuffle": desired_shuffle}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  train_shuffle = configs["train_input_config"].shuffle
  self.assertEqual(desired_shuffle, train_shuffle)
Example #10
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testOverwriteAllEvalNumEpochs(self):
  original_num_epochs = 10
  new_num_epochs = 1
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.eval_input_reader.add().num_epochs = original_num_epochs
  pipeline_config.eval_input_reader.add().num_epochs = original_num_epochs
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"eval_num_epochs": new_num_epochs}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  for eval_input_config in configs["eval_input_configs"]:
    self.assertEqual(new_num_epochs, eval_input_config.num_epochs)
Example #11
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testUpdateMaskTypeForAllInputConfigs(self):
  original_mask_type = input_reader_pb2.NUMERICAL_MASKS
  new_mask_type = input_reader_pb2.PNG_MASKS
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  train_config = pipeline_config.train_input_reader
  train_config.mask_type = original_mask_type
  eval_1 = pipeline_config.eval_input_reader.add()
  eval_1.mask_type = original_mask_type
  eval_1.name = "eval_1"
  eval_2 = pipeline_config.eval_input_reader.add()
  eval_2.mask_type = original_mask_type
  eval_2.name = "eval_2"
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"mask_type": new_mask_type}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)

  self.assertEqual(configs["train_input_config"].mask_type, new_mask_type)
  for eval_input_config in configs["eval_input_configs"]:
    self.assertEqual(eval_input_config.mask_type, new_mask_type)
Example #12
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testErrorOverwritingMultipleInputConfig(self):
  original_shuffle = False
  new_shuffle = True
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  eval_1 = pipeline_config.eval_input_reader.add()
  eval_1.shuffle = original_shuffle
  eval_1.name = "eval_1"
  eval_2 = pipeline_config.eval_input_reader.add()
  eval_2.shuffle = original_shuffle
  eval_2.name = "eval_2"
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"eval_shuffle": new_shuffle}
  with self.assertRaises(ValueError):
    configs = config_util.merge_external_params_with_configs(
        configs, kwargs_dict=override_dict)
Example #13
Source File: config_util.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def get_configs_from_pipeline_file(pipeline_config_path, config_override=None):
  """Reads config from a file containing pipeline_pb2.TrainEvalPipelineConfig.

  Args:
    pipeline_config_path: Path to pipeline_pb2.TrainEvalPipelineConfig text
      proto.
    config_override: A pipeline_pb2.TrainEvalPipelineConfig text proto to
      override pipeline_config_path.

  Returns:
    Dictionary of configuration objects. Keys are `model`, `train_config`,
      `train_input_config`, `eval_config`, `eval_input_config`. Values are the
      corresponding config objects.
  """
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  with tf.gfile.GFile(pipeline_config_path, "r") as f:
    proto_str = f.read()
    text_format.Merge(proto_str, pipeline_config)
  if config_override:
    text_format.Merge(config_override, pipeline_config)
  return create_configs_from_pipeline_proto(pipeline_config)
Example #14
Source File: config_util.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def create_pipeline_proto_from_configs(configs):
  """Creates a pipeline_pb2.TrainEvalPipelineConfig from configs dictionary.

  This function performs the inverse operation of
  create_configs_from_pipeline_proto().

  Args:
    configs: Dictionary of configs. See get_configs_from_pipeline_file().

  Returns:
    A fully populated pipeline_pb2.TrainEvalPipelineConfig.
  """
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.model.CopyFrom(configs["model"])
  pipeline_config.train_config.CopyFrom(configs["train_config"])
  pipeline_config.train_input_reader.CopyFrom(configs["train_input_config"])
  pipeline_config.eval_config.CopyFrom(configs["eval_config"])
  pipeline_config.eval_input_reader.extend(configs["eval_input_configs"])
  if "graph_rewriter_config" in configs:
    pipeline_config.graph_rewriter.CopyFrom(configs["graph_rewriter_config"])
  return pipeline_config
Example #15
Source File: config_util.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text)
Example #16
Source File: config_util_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def testOverwriteAllEvalSampling(self):
  original_num_eval_examples = 1
  new_num_eval_examples = 10
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.eval_input_reader.add().sample_1_of_n_examples = (
      original_num_eval_examples)
  pipeline_config.eval_input_reader.add().sample_1_of_n_examples = (
      original_num_eval_examples)
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  override_dict = {"sample_1_of_n_eval_examples": new_num_eval_examples}
  configs = config_util.merge_external_params_with_configs(
      configs, kwargs_dict=override_dict)
  for eval_input_config in configs["eval_input_configs"]:
    self.assertEqual(new_num_eval_examples,
                     eval_input_config.sample_1_of_n_examples)
Example #17
Source File: exporter_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_write_inference_graph(self):
  tmp_dir = self.get_temp_dir()
  trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
  self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                        use_moving_averages=False)
  with mock.patch.object(
      model_builder, 'build', autospec=True) as mock_builder:
    mock_builder.return_value = FakeModel()
    output_directory = os.path.join(tmp_dir, 'output')
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.eval_config.use_moving_averages = False
    exporter.export_inference_graph(
        input_type='image_tensor',
        pipeline_config=pipeline_config,
        trained_checkpoint_prefix=trained_checkpoint_prefix,
        output_directory=output_directory,
        write_inference_graph=True)
    self.assertTrue(os.path.exists(os.path.join(
        output_directory, 'inference_graph.pbtxt')))
Example #18
Source File: exporter_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_graph_with_tf_example_input(self):
  tmp_dir = self.get_temp_dir()
  trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
  self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                        use_moving_averages=False)
  with mock.patch.object(
      model_builder, 'build', autospec=True) as mock_builder:
    mock_builder.return_value = FakeModel()
    output_directory = os.path.join(tmp_dir, 'output')
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.eval_config.use_moving_averages = False
    exporter.export_inference_graph(
        input_type='tf_example',
        pipeline_config=pipeline_config,
        trained_checkpoint_prefix=trained_checkpoint_prefix,
        output_directory=output_directory)
    self.assertTrue(os.path.exists(os.path.join(
        output_directory, 'saved_model', 'saved_model.pb')))
Example #19
Source File: exporter_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_graph_with_moving_averages(self):
  tmp_dir = self.get_temp_dir()
  trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
  self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                        use_moving_averages=True)
  output_directory = os.path.join(tmp_dir, 'output')
  with mock.patch.object(
      model_builder, 'build', autospec=True) as mock_builder:
    mock_builder.return_value = FakeModel()
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    pipeline_config.eval_config.use_moving_averages = True
    exporter.export_inference_graph(
        input_type='image_tensor',
        pipeline_config=pipeline_config,
        trained_checkpoint_prefix=trained_checkpoint_prefix,
        output_directory=output_directory)
    self.assertTrue(os.path.exists(os.path.join(
        output_directory, 'saved_model', 'saved_model.pb')))
    expected_variables = set(['conv2d/bias', 'conv2d/kernel', 'global_step'])
    actual_variables = set(
        [var_name for var_name, _
         in tf.train.list_variables(output_directory)])
    self.assertTrue(expected_variables.issubset(actual_variables))
Example #20
Source File: exporter_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_graph_saves_pipeline_file(self):
  tmp_dir = self.get_temp_dir()
  trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
  self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                        use_moving_averages=True)
  output_directory = os.path.join(tmp_dir, 'output')
  with mock.patch.object(
      model_builder, 'build', autospec=True) as mock_builder:
    mock_builder.return_value = FakeModel()
    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    exporter.export_inference_graph(
        input_type='image_tensor',
        pipeline_config=pipeline_config,
        trained_checkpoint_prefix=trained_checkpoint_prefix,
        output_directory=output_directory)
    expected_pipeline_path = os.path.join(
        output_directory, 'pipeline.config')
    self.assertTrue(os.path.exists(expected_pipeline_path))

    written_pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile(expected_pipeline_path, 'r') as f:
      proto_str = f.read()
      text_format.Merge(proto_str, written_pipeline_config)
      self.assertProtoEquals(pipeline_config, written_pipeline_config)
Example #21
Source File: export_tflite_ssd_graph_lib_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_tflite_graph_with_moving_averages(self):
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.eval_config.use_moving_averages = True
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.height = 10
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.width = 10
  pipeline_config.model.ssd.num_classes = 2
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.y_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.x_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.height_scale = 5.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.width_scale = 5.0
  tflite_graph_file = self._export_graph(pipeline_config)
  self.assertTrue(os.path.exists(tflite_graph_file))
  (box_encodings_np, class_predictions_np
  ) = self._import_graph_and_run_inference(tflite_graph_file)
  self.assertAllClose(box_encodings_np,
                      [[[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]]])
  self.assertAllClose(class_predictions_np, [[[0.7, 0.6], [0.9, 0.0]]])
Example #22
Source File: export_tflite_ssd_graph_lib_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_tflite_graph_without_moving_averages(self):
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.eval_config.use_moving_averages = False
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.height = 10
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.width = 10
  pipeline_config.model.ssd.num_classes = 2
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.y_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.x_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.height_scale = 5.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.width_scale = 5.0
  tflite_graph_file = self._export_graph(pipeline_config)
  self.assertTrue(os.path.exists(tflite_graph_file))
  (box_encodings_np, class_predictions_np
  ) = self._import_graph_and_run_inference(tflite_graph_file)
  self.assertAllClose(box_encodings_np,
                      [[[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]]])
  self.assertAllClose(class_predictions_np, [[[0.7, 0.6], [0.9, 0.0]]])
Example #23
Source File: export_tflite_ssd_graph_lib_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_tflite_graph_grayscale(self):
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.eval_config.use_moving_averages = False
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.height = 10
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.width = 10
  (pipeline_config.model.ssd.image_resizer.fixed_shape_resizer
  ).convert_to_grayscale = True
  pipeline_config.model.ssd.num_classes = 2
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.y_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.x_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.height_scale = 5.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.width_scale = 5.0
  tflite_graph_file = self._export_graph(pipeline_config, num_channels=1)
  self.assertTrue(os.path.exists(tflite_graph_file))
  (box_encodings_np,
   class_predictions_np) = self._import_graph_and_run_inference(
       tflite_graph_file, num_channels=1)
  self.assertAllClose(box_encodings_np,
                      [[[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]]])
  self.assertAllClose(class_predictions_np, [[[0.7, 0.6], [0.9, 0.0]]])
Example #24
Source File: export_tflite_ssd_graph_lib_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_tflite_graph_with_quantization(self):
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.eval_config.use_moving_averages = False
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.height = 10
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.width = 10
  pipeline_config.graph_rewriter.quantization.delay = 500000
  pipeline_config.model.ssd.num_classes = 2
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.y_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.x_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.height_scale = 5.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.width_scale = 5.0
  tflite_graph_file = self._export_graph(pipeline_config)
  self.assertTrue(os.path.exists(tflite_graph_file))
  self._assert_quant_vars_exists(tflite_graph_file)
  (box_encodings_np, class_predictions_np
  ) = self._import_graph_and_run_inference(tflite_graph_file)
  self.assertAllClose(box_encodings_np,
                      [[[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]]])
  self.assertAllClose(class_predictions_np, [[[0.7, 0.6], [0.9, 0.0]]])
Example #25
Source File: export_tflite_ssd_graph_lib_test.py From vehicle_counting_tensorflow with MIT License | 6 votes |
def test_export_tflite_graph_with_softmax_score_conversion(self):
  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.eval_config.use_moving_averages = False
  pipeline_config.model.ssd.post_processing.score_converter = (
      post_processing_pb2.PostProcessing.SOFTMAX)
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.height = 10
  pipeline_config.model.ssd.image_resizer.fixed_shape_resizer.width = 10
  pipeline_config.model.ssd.num_classes = 2
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.y_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.x_scale = 10.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.height_scale = 5.0
  pipeline_config.model.ssd.box_coder.faster_rcnn_box_coder.width_scale = 5.0
  tflite_graph_file = self._export_graph(pipeline_config)
  self.assertTrue(os.path.exists(tflite_graph_file))
  (box_encodings_np, class_predictions_np
  ) = self._import_graph_and_run_inference(tflite_graph_file)
  self.assertAllClose(box_encodings_np,
                      [[[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 0.8, 0.8]]])
  self.assertAllClose(class_predictions_np,
                      [[[0.524979, 0.475021], [0.710949, 0.28905]]])
Example #26
Source File: config_util_test.py From ros_people_object_detection_tensorflow with Apache License 2.0 | 6 votes |
def test_get_configs_from_pipeline_file(self):
  """Test that proto configs can be read from pipeline config file."""
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.model.faster_rcnn.num_classes = 10
  pipeline_config.train_config.batch_size = 32
  pipeline_config.train_input_reader.label_map_path = "path/to/label_map"
  pipeline_config.eval_config.num_examples = 20
  pipeline_config.eval_input_reader.queue_capacity = 100
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  self.assertProtoEquals(pipeline_config.model, configs["model"])
  self.assertProtoEquals(pipeline_config.train_config,
                         configs["train_config"])
  self.assertProtoEquals(pipeline_config.train_input_reader,
                         configs["train_input_config"])
  self.assertProtoEquals(pipeline_config.eval_config, configs["eval_config"])
  self.assertProtoEquals(pipeline_config.eval_input_reader,
                         configs["eval_input_config"])
Example #27
Source File: config_util_test.py From ros_people_object_detection_tensorflow with Apache License 2.0 | 6 votes |
def test_create_pipeline_proto_from_configs(self):
  """Tests that proto can be reconstructed from configs dictionary."""
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.model.faster_rcnn.num_classes = 10
  pipeline_config.train_config.batch_size = 32
  pipeline_config.train_input_reader.label_map_path = "path/to/label_map"
  pipeline_config.eval_config.num_examples = 20
  pipeline_config.eval_input_reader.queue_capacity = 100
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  pipeline_config_reconstructed = (
      config_util.create_pipeline_proto_from_configs(configs))
  self.assertEqual(pipeline_config, pipeline_config_reconstructed)
Example #28
Source File: config_util_test.py From ros_people_object_detection_tensorflow with Apache License 2.0 | 6 votes |
def testNewMomentumOptimizerValue(self):
  """Tests that new momentum value is updated appropriately."""
  original_momentum_value = 0.4
  hparams = tf.contrib.training.HParams(momentum_optimizer_value=1.1)
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  optimizer_config = pipeline_config.train_config.optimizer.rms_prop_optimizer
  optimizer_config.momentum_optimizer_value = original_momentum_value
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  configs = config_util.merge_external_params_with_configs(configs, hparams)
  optimizer_config = configs["train_config"].optimizer.rms_prop_optimizer
  new_momentum_value = optimizer_config.momentum_optimizer_value
  self.assertAlmostEqual(1.0, new_momentum_value)  # Clipped to 1.0.
Example #29
Source File: config_util_test.py From ros_people_object_detection_tensorflow with Apache License 2.0 | 6 votes |
def testNewFocalLossParameters(self):
  """Tests that the loss weight ratio is updated appropriately."""
  original_alpha = 1.0
  original_gamma = 1.0
  new_alpha = 0.3
  new_gamma = 2.0
  hparams = tf.contrib.training.HParams(
      focal_loss_alpha=new_alpha, focal_loss_gamma=new_gamma)
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  classification_loss = pipeline_config.model.ssd.loss.classification_loss
  classification_loss.weighted_sigmoid_focal.alpha = original_alpha
  classification_loss.weighted_sigmoid_focal.gamma = original_gamma
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  configs = config_util.merge_external_params_with_configs(configs, hparams)
  classification_loss = configs["model"].ssd.loss.classification_loss
  self.assertAlmostEqual(new_alpha,
                         classification_loss.weighted_sigmoid_focal.alpha)
  self.assertAlmostEqual(new_gamma,
                         classification_loss.weighted_sigmoid_focal.gamma)
Example #30
Source File: config_util_test.py From ros_people_object_detection_tensorflow with Apache License 2.0 | 6 votes |
def testMergingKeywordArguments(self):
  """Tests that keyword arguments get merged as do hyperparameters."""
  original_num_train_steps = 100
  original_num_eval_steps = 5
  desired_num_train_steps = 10
  desired_num_eval_steps = 1
  pipeline_config_path = os.path.join(self.get_temp_dir(), "pipeline.config")

  pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
  pipeline_config.train_config.num_steps = original_num_train_steps
  pipeline_config.eval_config.num_examples = original_num_eval_steps
  _write_config(pipeline_config, pipeline_config_path)

  configs = config_util.get_configs_from_pipeline_file(pipeline_config_path)
  configs = config_util.merge_external_params_with_configs(
      configs,
      train_steps=desired_num_train_steps,
      eval_steps=desired_num_eval_steps)
  train_steps = configs["train_config"].num_steps
  eval_steps = configs["eval_config"].num_examples
  self.assertEqual(desired_num_train_steps, train_steps)
  self.assertEqual(desired_num_eval_steps, eval_steps)