Python object_detection.utils.learning_schedules.exponential_decay_with_burnin() Examples
The following are 29 code examples of object_detection.utils.learning_schedules.exponential_decay_with_burnin(). Each example is attributed to the project and source file it was taken from. All but the last are variants of the same unit test from learning_schedules_test.py; the final example shows the function being called from optimizer_builder.py. You may also want to check out the other functions and classes available in the object_detection.utils.learning_schedules module.
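The test snippets below assume import numpy as np, import tensorflow as tf (TF 1.x), and from object_detection.utils import learning_schedules. As a quick orientation, here is a minimal pure-Python sketch of what the schedule computes. This is an illustrative reimplementation, not the library code: it assumes the newer behavior exercised by the tests that pass min_learning_rate (decay offset by burnin_steps and clamped at a floor); the older tests, which omit min_learning_rate, decay from the raw global step instead.

def exponential_decay_with_burnin_sketch(global_step,
                                         learning_rate_base,
                                         learning_rate_decay_steps,
                                         learning_rate_decay_factor,
                                         burnin_learning_rate=0.0,
                                         burnin_steps=0,
                                         min_learning_rate=0.0):
  # Hypothetical reimplementation for illustration only.
  if global_step < burnin_steps:
    # Hold a constant low rate while the model burns in.
    return burnin_learning_rate
  # Staircase exponential decay, starting once burn-in ends.
  decay_exponent = (global_step - burnin_steps) // learning_rate_decay_steps
  rate = learning_rate_base * learning_rate_decay_factor ** decay_exponent
  # Never decay below the configured floor.
  return max(rate, min_learning_rate)

# Reproduces the expected rates in the newer tests below:
# [.5, .5, 1, 1, 1, .1, .1, .1, .05]
print([exponential_decay_with_burnin_sketch(i, 1.0, 3, .1, .5, 2, .05)
       for i in range(9)])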
Example #1
Source File: learning_schedules_test.py From object_detector_app with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  # Burn-in holds the rate at .5 for the first 2 steps; afterwards the
  # rate follows a staircase decay of 1.0 * .1**floor(step / 3).
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #2
Source File: learning_schedules_test.py From multilabel-image-classification-tensorflow with MIT License

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(9)
  ]
  # Here decay starts after the burn-in window and the rate is clamped
  # at min_learning_rate (.05) once it would fall below it.
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #3
Source File: learning_schedules_test.py From mtl-ssl with Apache License 2.0

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #4
Source File: learning_schedules_test.py From motion-rcnn with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #5
Source File: learning_schedules_test.py From models with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(9)
  ]
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #6
Source File: learning_schedules_test.py From g-tensorflow-models with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(9)
  ]
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #7
Source File: learning_schedules_test.py From MAX-Object-Detector with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(9)
  ]
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #8
Source File: learning_schedules_test.py From AniSeg with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #9
Source File: learning_schedules_test.py From object_detection_with_tensorflow with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #10
Source File: learning_schedules_test.py From Elphas with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #11
Source File: learning_schedules_test.py From MBMD with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #12
Source File: learning_schedules_test.py From object_detection_kitti with Apache License 2.0

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #13
Source File: learning_schedules_test.py From Live-feed-object-device-identification-using-Tensorflow-and-OpenCV with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(9)
  ]
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #14
Source File: learning_schedules_test.py From hands-detection with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #15
Source File: learning_schedules_test.py From moveo_ros with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #16
Source File: learning_schedules_test.py From BMW-TensorFlow-Training-GUI with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(9)
  ]
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #17
Source File: learning_schedules_test.py From DOTA_models with Apache License 2.0

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #18
Source File: learning_schedules_test.py From vehicle_counting_tensorflow with MIT License

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    min_learning_rate = .05
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps,
        min_learning_rate)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(9)
  ]
  exp_rates = [.5, .5, 1, 1, 1, .1, .1, .1, .05]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #19
Source File: learning_schedules_test.py From ros_people_object_detection_tensorflow with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #20
Source File: learning_schedules_test.py From Person-Detection-and-Tracking with MIT License

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #21
Source File: learning_schedules_test.py From garbage-object-detection-tensorflow with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #22
Source File: learning_schedules_test.py From HereIsWally with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #23
Source File: learning_schedules_test.py From yolo_v2 with Apache License 2.0

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #24
Source File: learning_schedules_test.py From Traffic-Rule-Violation-Detection-System with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #25
Source File: learning_schedules_test.py From Hands-On-Machine-Learning-with-OpenCV-4 with MIT License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #26
Source File: learning_schedules_test.py From tensorflow with BSD 2-Clause "Simplified" License

def testExponentialDecayWithBurnin(self):
  global_step = tf.placeholder(tf.int32, [])
  learning_rate_base = 1.0
  learning_rate_decay_steps = 3
  learning_rate_decay_factor = .1
  burnin_learning_rate = .5
  burnin_steps = 2
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  learning_rate = learning_schedules.exponential_decay_with_burnin(
      global_step, learning_rate_base, learning_rate_decay_steps,
      learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
  with self.test_session() as sess:
    output_rates = []
    for input_global_step in range(8):
      output_rate = sess.run(learning_rate,
                             feed_dict={global_step: input_global_step})
      output_rates.append(output_rate)
    self.assertAllClose(output_rates, exp_rates)
Example #27
Source File: learning_schedules_test.py From Gun-Detector with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #28
Source File: learning_schedules_test.py From ros_tensorflow with Apache License 2.0

def testExponentialDecayWithBurnin(self):

  def graph_fn(global_step):
    learning_rate_base = 1.0
    learning_rate_decay_steps = 3
    learning_rate_decay_factor = .1
    burnin_learning_rate = .5
    burnin_steps = 2
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        global_step, learning_rate_base, learning_rate_decay_steps,
        learning_rate_decay_factor, burnin_learning_rate, burnin_steps)
    assert learning_rate.op.name.endswith('learning_rate')
    return (learning_rate,)

  output_rates = [
      self.execute(graph_fn, [np.array(i).astype(np.int64)])
      for i in range(8)
  ]
  exp_rates = [.5, .5, 1, .1, .1, .1, .01, .01]
  self.assertAllClose(output_rates, exp_rates, rtol=1e-4)
Example #29
Source File: optimizer_builder.py From MAX-Object-Detector with Apache License 2.0

def _create_learning_rate(learning_rate_config):
  """Create optimizer learning rate based on config.

  Args:
    learning_rate_config: A LearningRate proto message.

  Returns:
    A learning rate.

  Raises:
    ValueError: when using an unsupported input data type.
  """
  learning_rate = None
  learning_rate_type = learning_rate_config.WhichOneof('learning_rate')
  if learning_rate_type == 'constant_learning_rate':
    config = learning_rate_config.constant_learning_rate
    learning_rate = tf.constant(config.learning_rate, dtype=tf.float32,
                                name='learning_rate')

  if learning_rate_type == 'exponential_decay_learning_rate':
    config = learning_rate_config.exponential_decay_learning_rate
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        tf.train.get_or_create_global_step(),
        config.initial_learning_rate,
        config.decay_steps,
        config.decay_factor,
        burnin_learning_rate=config.burnin_learning_rate,
        burnin_steps=config.burnin_steps,
        min_learning_rate=config.min_learning_rate,
        staircase=config.staircase)

  if learning_rate_type == 'manual_step_learning_rate':
    config = learning_rate_config.manual_step_learning_rate
    if not config.schedule:
      raise ValueError('Empty learning rate schedule.')
    learning_rate_step_boundaries = [x.step for x in config.schedule]
    learning_rate_sequence = [config.initial_learning_rate]
    learning_rate_sequence += [x.learning_rate for x in config.schedule]
    learning_rate = learning_schedules.manual_stepping(
        tf.train.get_or_create_global_step(), learning_rate_step_boundaries,
        learning_rate_sequence, config.warmup)

  if learning_rate_type == 'cosine_decay_learning_rate':
    config = learning_rate_config.cosine_decay_learning_rate
    learning_rate = learning_schedules.cosine_decay_with_warmup(
        tf.train.get_or_create_global_step(),
        config.learning_rate_base,
        config.total_steps,
        config.warmup_learning_rate,
        config.warmup_steps,
        config.hold_base_rate_steps)

  if learning_rate is None:
    raise ValueError('Learning_rate %s not supported.' % learning_rate_type)

  return learning_rate
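_create_learning_rate is driven by a LearningRate proto taken from the training pipeline config. Based on the config fields accessed above, a fragment of the kind that selects exponential_decay_with_burnin might look like the following (the numeric values are illustrative only, not recommendations):

exponential_decay_learning_rate {
  initial_learning_rate: 0.004
  decay_steps: 800720
  decay_factor: 0.95
  burnin_learning_rate: 0.001
  burnin_steps: 2000
  min_learning_rate: 0.0001
  staircase: true
}

With such a config, training would hold the rate at 0.001 for the first 2000 steps, switch to the 0.004 base rate, then decay by a factor of 0.95 every 800720 steps, never dropping below 0.0001.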