Python tensorflow.python.training.moving_averages.ExponentialMovingAverage() Examples
The following are 5 code examples of `tensorflow.python.training.moving_averages.ExponentialMovingAverage()`.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module `tensorflow.python.training.moving_averages`, or try the search function.
Example #1
Source File: moving_average_optimizer.py From lambda-packs with MIT License | 6 votes |
def __init__(self, opt, average_decay=0.9999, num_updates=None, sequential_update=True):
    """Wrap an optimizer so trained variables also get an exponential moving average.

    Args:
      opt: A tf.Optimizer used to compute and apply gradients.
      average_decay: Float decay rate for the moving averages of trained
        variables; see tf.train.ExponentialMovingAverage.
      num_updates: Optional count of updates applied to variables; see
        tf.train.ExponentialMovingAverage.
      sequential_update: Bool. When True, the moving average is updated only
        after the gradient update; when False, it may be computed concurrently
        with the model update (benign data races possible).
    """
    # Independent attribute initialization; order does not matter.
    self._sequential_update = sequential_update
    self._variable_map = None
    self._optimizer = opt
    # EMA object that will track the trained variables.
    self._ema = moving_averages.ExponentialMovingAverage(
        average_decay, num_updates=num_updates)
Example #2
Source File: moving_average_optimizer.py From auto-alt-text-lambda-api with MIT License | 6 votes |
def __init__(self, opt, average_decay=0.9999, num_updates=None, sequential_update=True):
    """Wrap an optimizer so trained variables also get an exponential moving average.

    Args:
      opt: A tf.Optimizer used to compute and apply gradients.
      average_decay: Float decay rate for the moving averages of trained
        variables; see tf.train.ExponentialMovingAverage.
      num_updates: Optional count of updates applied to variables; see
        tf.train.ExponentialMovingAverage.
      sequential_update: Bool. When True, the moving average is updated only
        after the gradient update; when False, it may be computed concurrently
        with the model update (benign data races possible).
    """
    # Independent attribute initialization; order does not matter.
    self._sequential_update = sequential_update
    self._variable_map = None
    self._optimizer = opt
    # EMA object that will track the trained variables.
    self._ema = moving_averages.ExponentialMovingAverage(
        average_decay, num_updates=num_updates)
Example #3
Source File: moving_average_optimizer.py From deep_image_model with Apache License 2.0 | 6 votes |
def __init__(self, opt, average_decay=0.9999, num_updates=None, sequential_update=True):
    """Wrap an optimizer so trained variables also get an exponential moving average.

    Args:
      opt: A tf.Optimizer used to compute and apply gradients.
      average_decay: Float decay rate for the moving averages of trained
        variables; see tf.train.ExponentialMovingAverage.
      num_updates: Optional count of updates applied to variables; see
        tf.train.ExponentialMovingAverage.
      sequential_update: Bool. When True, the moving average is updated only
        after the gradient update; when False, it may be computed concurrently
        with the model update (benign data races possible).
    """
    # Independent attribute initialization; order does not matter.
    self._sequential_update = sequential_update
    self._variable_map = None
    self._optimizer = opt
    # EMA object that will track the trained variables.
    self._ema = moving_averages.ExponentialMovingAverage(
        average_decay, num_updates=num_updates)
Example #4
Source File: moving_average_optimizer.py From keras-lambda with MIT License | 6 votes |
def __init__(self, opt, average_decay=0.9999, num_updates=None, sequential_update=True):
    """Wrap an optimizer so trained variables also get an exponential moving average.

    Args:
      opt: A tf.Optimizer used to compute and apply gradients.
      average_decay: Float decay rate for the moving averages of trained
        variables; see tf.train.ExponentialMovingAverage.
      num_updates: Optional count of updates applied to variables; see
        tf.train.ExponentialMovingAverage.
      sequential_update: Bool. When True, the moving average is updated only
        after the gradient update; when False, it may be computed concurrently
        with the model update (benign data races possible).
    """
    # Independent attribute initialization; order does not matter.
    self._sequential_update = sequential_update
    self._variable_map = None
    self._optimizer = opt
    # EMA object that will track the trained variables.
    self._ema = moving_averages.ExponentialMovingAverage(
        average_decay, num_updates=num_updates)
Example #5
Source File: sparse_optimizers.py From rigl with Apache License 2.0 | 5 votes |
def __init__(self, optimizer, begin_step, end_step, frequency,
             drop_fraction=0.1, drop_fraction_anneal='constant',
             use_locking=False, grow_init='zeros', momentum=0.9,
             use_tpu=False, name='SparseMomentumOptimizer',
             stateless_seed_offset=0):
    """Construct a SparseMomentumOptimizer.

    Args:
      optimizer: Wrapped optimizer that computes and applies gradients.
      begin_step: Step at which sparse updates begin.
      end_step: Step at which sparse updates end.
      frequency: How often (in steps) the mask update is applied.
      drop_fraction: Fraction of connections to drop at each update.
      drop_fraction_anneal: Annealing schedule name for the drop fraction.
      use_locking: Bool, forwarded to the parent optimizer.
      grow_init: Initialization scheme for newly grown connections.
      momentum: Decay used for the exponential moving average of gradients.
      use_tpu: Bool, whether to use TPU-specific code paths.
      name: Optional name for the optimizer.
      stateless_seed_offset: Offset added to stateless random seeds.
    """
    super(SparseMomentumOptimizer, self).__init__(
        optimizer, begin_step, end_step, frequency,
        drop_fraction=drop_fraction,
        drop_fraction_anneal=drop_fraction_anneal,
        grow_init=grow_init,
        use_locking=use_locking,
        # Bug fix: previously hard-coded to 'SparseMomentumOptimizer',
        # silently ignoring a caller-supplied `name`. Forward the parameter;
        # the default preserves the old behavior.
        name=name,
        stateless_seed_offset=stateless_seed_offset)
    # EMA of gradients, decayed by `momentum`, used to decide which
    # connections to grow.
    self._ema_grads = moving_averages.ExponentialMovingAverage(decay=momentum)
    self._use_tpu = use_tpu