Python object_detection.protos.optimizer_pb2.Optimizer() Examples

The following are 30 code examples of object_detection.protos.optimizer_pb2.Optimizer(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module object_detection.protos.optimizer_pb2, or try the search function.
Example #1
Source File: optimizer_builder_test.py    From tensorflow with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    """Builds a MovingAverageOptimizer and checks the custom decay is applied."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
    # TODO: Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2)
Example #2
Source File: optimizer_builder_test.py    From ros_tensorflow with Apache License 2.0 6 votes vote down vote up
def testBuildMomentumOptimizer(self):
    """Builds a MomentumOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
Example #3
Source File: optimizer_builder_test.py    From moveo_ros with MIT License 6 votes vote down vote up
def testBuildMomentumOptimizer(self):
    """Builds a MomentumOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
Example #4
Source File: optimizer_builder_test.py    From moveo_ros with MIT License 6 votes vote down vote up
def testBuildAdamOptimizer(self):
    """Builds an AdamOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.AdamOptimizer)
Example #5
Source File: optimizer_builder_test.py    From moveo_ros with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizer(self):
    """Builds a MovingAverageOptimizer when use_moving_average is set."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
Example #6
Source File: optimizer_builder_test.py    From moveo_ros with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    """Builds a MovingAverageOptimizer and checks the custom decay is applied."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
    # TODO: Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2)
Example #7
Source File: optimizer_builder_test.py    From hands-detection with MIT License 6 votes vote down vote up
def testBuildRMSPropOptimizer(self):
    """Builds an RMSPropOptimizer with an exponential-decay learning rate."""
    optimizer_text_proto = """
      rms_prop_optimizer: {
        learning_rate: {
          exponential_decay_learning_rate {
            initial_learning_rate: 0.004
            decay_steps: 800720
            decay_factor: 0.95
          }
        }
        momentum_optimizer_value: 0.9
        decay: 0.9
        epsilon: 1.0
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.RMSPropOptimizer)
Example #8
Source File: optimizer_builder_test.py    From hands-detection with MIT License 6 votes vote down vote up
def testBuildMomentumOptimizer(self):
    """Builds a MomentumOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
Example #9
Source File: optimizer_builder_test.py    From hands-detection with MIT License 6 votes vote down vote up
def testBuildAdamOptimizer(self):
    """Builds an AdamOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.AdamOptimizer)
Example #10
Source File: optimizer_builder_test.py    From hands-detection with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizer(self):
    """Builds a MovingAverageOptimizer when use_moving_average is set."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
Example #11
Source File: optimizer_builder_test.py    From hands-detection with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    """Builds a MovingAverageOptimizer and checks the custom decay is applied."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
    # TODO: Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2)
Example #12
Source File: optimizer_builder_test.py    From moveo_ros with MIT License 6 votes vote down vote up
def testBuildRMSPropOptimizer(self):
    """Builds an RMSPropOptimizer with an exponential-decay learning rate."""
    optimizer_text_proto = """
      rms_prop_optimizer: {
        learning_rate: {
          exponential_decay_learning_rate {
            initial_learning_rate: 0.004
            decay_steps: 800720
            decay_factor: 0.95
          }
        }
        momentum_optimizer_value: 0.9
        decay: 0.9
        epsilon: 1.0
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.RMSPropOptimizer)
Example #13
Source File: optimizer_builder_test.py    From Gun-Detector with Apache License 2.0 6 votes vote down vote up
def testBuildMovingAverageOptimizer(self):
    """Builds a MovingAverageOptimizer when use_moving_average is set."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
Example #14
Source File: optimizer_builder_test.py    From Gun-Detector with Apache License 2.0 6 votes vote down vote up
def testBuildMomentumOptimizer(self):
    """Builds a MomentumOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
Example #15
Source File: optimizer_builder_test.py    From Gun-Detector with Apache License 2.0 6 votes vote down vote up
def testBuildRMSPropOptimizer(self):
    """Builds an RMSPropOptimizer with an exponential-decay learning rate."""
    optimizer_text_proto = """
      rms_prop_optimizer: {
        learning_rate: {
          exponential_decay_learning_rate {
            initial_learning_rate: 0.004
            decay_steps: 800720
            decay_factor: 0.95
          }
        }
        momentum_optimizer_value: 0.9
        decay: 0.9
        epsilon: 1.0
      }
      use_moving_average: false
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.RMSPropOptimizer)
Example #16
Source File: optimizer_builder_test.py    From Gun-Detector with Apache License 2.0 6 votes vote down vote up
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    """Builds a MovingAverageOptimizer and checks the custom decay is applied."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
    # TODO(rathodv): Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2)
Example #17
Source File: optimizer_builder_test.py    From tensorflow with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def testBuildMovingAverageOptimizer(self):
    """Builds a MovingAverageOptimizer when use_moving_average is set."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
Example #18
Source File: optimizer_builder_test.py    From tensorflow with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def testBuildAdamOptimizer(self):
    """Builds an AdamOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.AdamOptimizer)
Example #19
Source File: optimizer_builder_test.py    From tensorflow with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def testBuildMomentumOptimizer(self):
    """Builds a MomentumOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
Example #20
Source File: optimizer_builder_test.py    From tensorflow with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def testBuildRMSPropOptimizer(self):
    """Builds an RMSPropOptimizer with an exponential-decay learning rate."""
    optimizer_text_proto = """
      rms_prop_optimizer: {
        learning_rate: {
          exponential_decay_learning_rate {
            initial_learning_rate: 0.004
            decay_steps: 800720
            decay_factor: 0.95
          }
        }
        momentum_optimizer_value: 0.9
        decay: 0.9
        epsilon: 1.0
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.RMSPropOptimizer)
Example #21
Source File: optimizer_builder_test.py    From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    """Builds a MovingAverageOptimizer and checks the custom decay is applied."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
    # TODO: Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2)
Example #22
Source File: optimizer_builder_test.py    From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizer(self):
    """Builds a MovingAverageOptimizer when use_moving_average is set."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
Example #23
Source File: optimizer_builder_test.py    From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License 6 votes vote down vote up
def testBuildAdamOptimizer(self):
    """Builds an AdamOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.AdamOptimizer)
Example #24
Source File: optimizer_builder_test.py    From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License 6 votes vote down vote up
def testBuildMomentumOptimizer(self):
    """Builds a MomentumOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
Example #25
Source File: optimizer_builder_test.py    From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License 6 votes vote down vote up
def testBuildRMSPropOptimizer(self):
    """Builds an RMSPropOptimizer with an exponential-decay learning rate."""
    optimizer_text_proto = """
      rms_prop_optimizer: {
        learning_rate: {
          exponential_decay_learning_rate {
            initial_learning_rate: 0.004
            decay_steps: 800720
            decay_factor: 0.95
          }
        }
        momentum_optimizer_value: 0.9
        decay: 0.9
        epsilon: 1.0
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.RMSPropOptimizer)
Example #26
Source File: optimizer_builder_test.py    From Traffic-Rule-Violation-Detection-System with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    """Builds a MovingAverageOptimizer and checks the custom decay is applied."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
    # TODO: Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2)
Example #27
Source File: optimizer_builder_test.py    From Traffic-Rule-Violation-Detection-System with MIT License 6 votes vote down vote up
def testBuildMovingAverageOptimizer(self):
    """Builds a MovingAverageOptimizer when use_moving_average is set."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
Example #28
Source File: optimizer_builder_test.py    From Traffic-Rule-Violation-Detection-System with MIT License 6 votes vote down vote up
def testBuildMomentumOptimizer(self):
    """Builds a MomentumOptimizer from a text-format Optimizer proto."""
    optimizer_text_proto = """
      momentum_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.001
          }
        }
        momentum_optimizer_value: 0.99
      }
      use_moving_average: false
    """
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer, _ = optimizer_builder.build(optimizer_proto)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.MomentumOptimizer)
Example #29
Source File: optimizer_builder_test.py    From DOTA_models with Apache License 2.0 6 votes vote down vote up
def testBuildRMSPropOptimizer(self):
    """Builds an RMSPropOptimizer with an exponential-decay learning rate."""
    optimizer_text_proto = """
      rms_prop_optimizer: {
        learning_rate: {
          exponential_decay_learning_rate {
            initial_learning_rate: 0.004
            decay_steps: 800720
            decay_factor: 0.95
          }
        }
        momentum_optimizer_value: 0.9
        decay: 0.9
        epsilon: 1.0
      }
      use_moving_average: false
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.train.RMSPropOptimizer)
Example #30
Source File: optimizer_builder_test.py    From yolo_v2 with Apache License 2.0 6 votes vote down vote up
def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    """Builds a MovingAverageOptimizer and checks the custom decay is applied."""
    optimizer_text_proto = """
      adam_optimizer: {
        learning_rate: {
          constant_learning_rate {
            learning_rate: 0.002
          }
        }
      }
      use_moving_average: True
      moving_average_decay: 0.2
    """
    # set() is the idiomatic empty set; set([]) builds a throwaway list.
    global_summaries = set()
    optimizer_proto = optimizer_pb2.Optimizer()
    text_format.Merge(optimizer_text_proto, optimizer_proto)
    optimizer = optimizer_builder.build(optimizer_proto, global_summaries)
    # assertIsInstance reports the actual type on failure, unlike
    # assertTrue(isinstance(...)) which only reports "False is not true".
    self.assertIsInstance(optimizer, tf.contrib.opt.MovingAverageOptimizer)
    # TODO(rathodv): Find a way to not depend on the private members.
    self.assertAlmostEqual(optimizer._ema._decay, 0.2)