Python tensorflow.DataType() Examples
The following are 12 code examples of tensorflow.DataType(). You can go to the original project or source file by following the link above each example. You may also want to check out the other available functions and classes of the tensorflow module.
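Before the examples, here is a minimal standalone sketch (not taken from any of the projects below; the variable name is illustrative) of what the dtype argument described as "tf.DataType" in the examples' docstrings looks like in practice, assuming a standard TensorFlow installation:

import tensorflow as tf

# Hypothetical sketch: the "tf.DataType" in the docstrings below is in practice
# a tf.DType value such as tf.float32.
dtype = tf.as_dtype('float32')     # -> tf.float32
print(dtype.name)                  # 'float32'
print(dtype.as_datatype_enum)      # 1, the DT_FLOAT value of the DataType proto enum

# Creating a parameter with that dtype, loosely analogous to what _AddParam
# below does with tf.get_variable in the TF 1.x graph-building API.
weights = tf.Variable(tf.zeros([3, 4], dtype=dtype), name='weights')
print(weights.dtype)               # dtype of the created variable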
Example #1
Source File: graph_builder.py From DOTA_models with Apache License 2.0
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
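A note on the averaging branch above: when self._averaging_decay == 1, the code builds an ExponentialMovingAverage with decay = step / (step + 1). Each EMA update computes avg = decay * avg + (1 - decay) * value, and with that particular decay the shadow variable is exactly the running arithmetic mean of every value seen so far, which is why the log message calls it "vanilla averaging". A minimal plain-Python sketch of that arithmetic (not the TF graph code; the values are made up for illustration):

values = [4.0, 8.0, 6.0, 2.0]  # pretend parameter values observed at successive steps
avg = 0.0
for step, value in enumerate(values):          # step = number of updates applied so far
    decay = step / (step + 1.0)                # the decay _AddParam uses for vanilla averaging
    avg = decay * avg + (1.0 - decay) * value  # same update rule ExponentialMovingAverage applies
print(avg)  # 5.0 == sum(values) / len(values)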
Example #2
Source File: graph_builder.py From yolo_v2 with Apache License 2.0
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #3
Source File: graph_builder.py From Gun-Detector with Apache License 2.0
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #4
Source File: graph_builder.py From Action_Recognition_Zoo with MIT License
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer)
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #5
Source File: graph_builder.py From ECO-pytorch with BSD 2-Clause "Simplified" License
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer)
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #6
Source File: graph_builder.py From hands-detection with MIT License
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #7
Source File: graph_builder.py From object_detection_kitti with Apache License 2.0
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #8
Source File: graph_builder.py From object_detection_with_tensorflow with MIT License
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #9
Source File: graph_builder.py From AI_Reader with Apache License 2.0
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer)
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #10
Source File: graph_builder.py From HumanRecognition with MIT License
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #11
Source File: graph_builder.py From g-tensorflow-models with Apache License 2.0
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])
Example #12
Source File: graph_builder.py From multilabel-image-classification-tensorflow with MIT License
def _AddParam(self, shape, dtype, name, initializer=None, return_average=False):
  """Add a model parameter w.r.t. we expect to compute gradients.

  _AddParam creates both regular parameters (usually for training) and
  averaged nodes (usually for inference). It returns one or the other based
  on the 'return_average' arg.

  Args:
    shape: int list, tensor shape of the parameter to create
    dtype: tf.DataType, data type of the parameter
    name: string, name of the parameter in the TF graph
    initializer: optional initializer for the parameter
    return_average: if False, return parameter otherwise return moving average

  Returns:
    parameter or averaged parameter
  """
  if name not in self.params:
    step = tf.cast(self.GetStep(), tf.float32)
    # Put all parameters and their initializing ops in their own scope
    # irrespective of the current scope (training or eval).
    with tf.name_scope(self._param_scope):
      self.params[name] = tf.get_variable(name, shape, dtype, initializer)
      param = self.params[name]
      if initializer is not None:
        self.inits[name] = state_ops.init_variable(param, initializer)
      if self._averaging_decay == 1:
        logging.info('Using vanilla averaging of parameters.')
        ema = tf.train.ExponentialMovingAverage(decay=(step / (step + 1.0)),
                                                num_updates=None)
      else:
        ema = tf.train.ExponentialMovingAverage(decay=self._averaging_decay,
                                                num_updates=step)
      self._averaging[name + '_avg_update'] = ema.apply([param])
      self.variables[name + '_avg_var'] = ema.average(param)
      self.inits[name + '_avg_init'] = state_ops.init_variable(
          ema.average(param), tf.zeros_initializer())
  return (self.variables[name + '_avg_var'] if return_average
          else self.params[name])