Python tensorflow.Operations() Examples
The following are 6 code examples of tensorflow.Operations(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow.
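In TensorFlow's graph mode, every node of a tf.Graph is a tf.Operation, and tf.Graph.get_operations() returns them all. The short sketch below is illustrative (it is not taken from any of the projects listed here and assumes a TensorFlow 1.x-style graph); it shows the attributes the examples below rely on: op.type, op.name and op.inputs.

import tensorflow as tf

graph = tf.Graph()
with graph.as_default():
    x = tf.placeholder(tf.float32, shape=(None, 3), name='x')   # "Placeholder" op
    w = tf.constant([[1.0], [2.0], [3.0]], name='w')            # "Const" op
    y = tf.matmul(x, w, name='y')                               # "MatMul" op

for op in graph.get_operations():
    # Each node is a tf.Operation; its type, name and input tensors can be inspected.
    print(op.name, op.type, [t.name for t in op.inputs])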
Example #1
Source File: tensorflow_translator.py From eran with Apache License 2.0
def matmul_resources(self, op):
    """
    checks which one of the direct ancestor tf.Operations is a constant and returns the underlying
    tensor as a numpy.ndarray inside a tuple. The matrix is manipulated in a way that it can be
    used as the left multiplier in the matrix multiplication.

    Arguments
    ---------
    op : tf.Operation
        must have type "MatMul"

    Return
    ------
    output : tuple
        tuple with the matrix (of type numpy.ndarray) as its only item
    """
    inputs = op.inputs
    left = inputs[0]
    right = inputs[1]

    if left.op.type == "Const":
        matrix = self.sess.run(left) if not op.get_attr("transpose_a") else self.sess.run(left).transpose()
    else:
        matrix = self.sess.run(right).transpose() if not op.get_attr("transpose_b") else self.sess.run(right)
    return (matrix,)
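The helper above leans on two parts of the tf.Operation API: op.inputs (the input tensors, whose producing ops can be checked for type "Const") and op.get_attr (node attributes such as transpose_a). A minimal, illustrative sketch of those calls on a hand-built MatMul node, assuming TensorFlow 1.x (this graph is not part of ERAN):

import numpy as np
import tensorflow as tf

sess = tf.Session()
a = tf.constant(np.eye(2, dtype=np.float32))      # the Const ancestor
b = tf.placeholder(tf.float32, shape=(2, 2))
mm = tf.matmul(a, b, transpose_a=True)

op = mm.op                                        # a tf.Operation of type "MatMul"
print(op.type)                                    # "MatMul"
print(op.get_attr("transpose_a"))                 # True
left, right = op.inputs
print(left.op.type, right.op.type)                # "Const" "Placeholder"
print(sess.run(left))                             # the constant as a numpy.ndarray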
Example #2
Source File: tensorflow_translator.py From eran with Apache License 2.0
def add_resources(self, op):
    """
    checks which one of the direct ancestor tf.Operations is a constant and returns the underlying
    tensor as a numpy.ndarray inside a tuple.

    Arguments
    ---------
    op : tf.Operation
        must have type "Add"

    Return
    ------
    output : tuple
        tuple with the addend (of type numpy.ndarray) as its only item
    """
    inputs = op.inputs
    left = inputs[0]
    right = inputs[1]

    if left.op.type == "Const":
        addend = self.sess.run(left)
    else:
        addend = self.sess.run(right)
    return (addend,)
Example #3
Source File: gamma_mapper.py From g-tensorflow-models with Apache License 2.0
def _dfs(op, visited=None):
    """Perform DFS on a graph.

    Args:
      op: A tf.Operation, the root node for the DFS.
      visited: A set, used in the recursion.

    Returns:
      A list of the tf.Operations of type Conv2D that were encountered.
    """
    visited = visited or set()
    ret = []
    for child in op.inputs:
        if child.op in visited:
            return ret
        visited.add(child.op)
        if child.op.type not in op_regularizer_manager.NON_PASS_THROUGH_OPS:
            ret.extend(_dfs(child.op, visited))
        if child.op.type in ('Conv2D',):  # TODO: support depthwise conv.
            ret.append(child.op)
    return ret
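A hedged sketch of how _dfs might be driven: build a small two-convolution graph and walk backwards from the output op. It assumes _dfs and the project's op_regularizer_manager module (which defines NON_PASS_THROUGH_OPS) are in scope, and uses TF 1.x layer APIs; the graph itself is illustrative.

import tensorflow as tf

images = tf.placeholder(tf.float32, shape=(None, 28, 28, 1))
net = tf.layers.conv2d(images, filters=8, kernel_size=3, name='conv1')
net = tf.nn.relu(net)
logits = tf.layers.conv2d(net, filters=4, kernel_size=1, name='conv2')

# Walk back from the output node; only ops of type 'Conv2D' are collected.
conv_ops = _dfs(logits.op)
print([op.name for op in conv_ops])   # names of the Conv2D tf.Operations found by the walk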
Example #4
Source File: gamma_mapper.py From multilabel-image-classification-tensorflow with MIT License
def _dfs(op, visited=None):
    """Perform DFS on a graph.

    Args:
      op: A tf.Operation, the root node for the DFS.
      visited: A set, used in the recursion.

    Returns:
      A list of the tf.Operations of type Conv2D that were encountered.
    """
    visited = visited or set()
    ret = []
    for child in op.inputs:
        if child.op in visited:
            return ret
        visited.add(child.op)
        if child.op.type not in op_regularizer_manager.NON_PASS_THROUGH_OPS:
            ret.extend(_dfs(child.op, visited))
        if child.op.type in ('Conv2D',):  # TODO: support depthwise conv.
            ret.append(child.op)
    return ret
Example #5
Source File: tf_utils.py From energy-py with MIT License
def make_copy_ops(parent, child, scope='copy_ops'):
    """
    Creates the operations to copy variables

    args
        parent (list of tf.Variables)
        child (list of tf.Variables)

    returns
        copy_ops (list of tf.Operations)
        tau (tf.placeholder)
    """
    with tf.variable_scope(scope):
        tau = tf.Variable(1.0, name='tau')

        copy_ops = []
        for p, c in zip(parent, child):
            assert p.name.split('/')[1:] == c.name.split('/')[1:]

            copy_ops.append(
                c.assign(tf.add(tf.multiply(p, tau),
                                tf.multiply(c, 1 - tau)))
            )

    return copy_ops, tau
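A hedged usage sketch for make_copy_ops, along the lines of a target-network update in a DQN-style agent: two variable scopes stand in for the online and target networks. The scope names are illustrative and TF 1.x APIs are assumed; make_copy_ops from the example above must be in scope.

import tensorflow as tf

with tf.variable_scope('online'):
    online_w = tf.get_variable('w', shape=(4,), initializer=tf.ones_initializer())
with tf.variable_scope('target'):
    target_w = tf.get_variable('w', shape=(4,), initializer=tf.zeros_initializer())

copy_ops, tau = make_copy_ops([online_w], [target_w])

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(copy_ops)            # tau defaults to 1.0, i.e. a hard copy
    print(sess.run(target_w))     # now equal to online_w

With tau left at its default of 1.0 the assign is a hard copy; for a soft update you would first assign a smaller value to the tau variable.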
Example #6
Source File: tensorflow_translator.py From eran with Apache License 2.0
def __init__(self, model, session=None):
    """
    This constructor takes a reference to a TensorFlow Operation or Tensor or Keras model and then applies the two
    TensorFlow functions graph_util.convert_variables_to_constants and graph_util.remove_training_nodes to cleanse
    the graph of any nodes that are linked to training. This leaves us with the nodes you need for inference.
    In the resulting graph there should only be tf.Operations left that have one of the following types
    [Const, MatMul, Add, BiasAdd, Conv2D, Reshape, MaxPool, AveragePool, Placeholder, Relu, Sigmoid, Tanh].
    If the input should be a Keras model we will ignore operations with type Pack, Shape, StridedSlice, and Prod
    such that the Flatten layer can be used.

    Arguments
    ---------
    model : tensorflow.Tensor or tensorflow.Operation or tensorflow.python.keras.engine.sequential.Sequential or keras.engine.sequential.Sequential
        if tensorflow.Tensor: model.op will be treated as the output node of the TensorFlow model. Make sure that
            the graph only contains supported operations after applying graph_util.convert_variables_to_constants
            and graph_util.remove_training_nodes with [model.op.name] as output_node_names
        if tensorflow.Operation: model will be treated as the output of the TensorFlow model. Make sure that the
            graph only contains supported operations after applying graph_util.convert_variables_to_constants
            and graph_util.remove_training_nodes with [model.op.name] as output_node_names
        if tensorflow.python.keras.engine.sequential.Sequential: x = model.layers[-1].output.op.inputs[0].op will be
            treated as the output node of the Keras model. Make sure that the graph only contains supported
            operations after applying graph_util.convert_variables_to_constants and
            graph_util.remove_training_nodes with [x.name] as output_node_names
        if keras.engine.sequential.Sequential: x = model.layers[-1].output.op.inputs[0].op will be treated as the
            output node of the Keras model. Make sure that the graph only contains supported operations after
            applying graph_util.convert_variables_to_constants and graph_util.remove_training_nodes with [x.name]
            as output_node_names
    session : tf.Session
        session which contains the information about the trained variables. If None the code will take the Session
        from tf.get_default_session(). If you pass a keras model you don't have to provide a session, this function
        will automatically get it.
    """
    output_names = None
    if issubclass(model.__class__, tf.Tensor):
        output_names = [model.op.name]
    elif issubclass(model.__class__, tf.Operation):
        output_names = [model.name]
    elif issubclass(model.__class__, Sequential):
        session = tf.keras.backend.get_session()
        output_names = [model.layers[-1].output.op.inputs[0].op.name]
        model = model.layers[-1].output.op
    elif issubclass(model.__class__, onnx.ModelProto):
        assert 0, 'not tensorflow model'
    else:
        import keras
        if issubclass(model.__class__, keras.engine.sequential.Sequential):
            session = keras.backend.get_session()
            output_names = [model.layers[-1].output.op.inputs[0].op.name]
            model = model.layers[-1].output.op
        else:
            assert 0, "ERAN can't recognize this input"

    if session is None:
        session = tf.get_default_session()

    tmp = graph_util.convert_variables_to_constants(session, model.graph.as_graph_def(), output_names)
    self.graph_def = graph_util.remove_training_nodes(tmp)
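A hedged sketch of constructing the translator from a plain tf.Tensor output. The surrounding class is not shown on this page, so the name TFTranslator is an assumption, and the tiny ReLU graph below is illustrative rather than taken from ERAN; TF 1.x graph mode is assumed.

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=(1, 4), name='input')
w = tf.constant([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0]])
b = tf.constant([0.1, -0.1])
logits = tf.nn.relu(tf.nn.bias_add(tf.matmul(x, w), b))   # MatMul -> BiasAdd -> Relu, all supported types

with tf.Session() as sess:
    # "TFTranslator" is assumed to be the class the __init__ above belongs to.
    translator = TFTranslator(logits, session=sess)        # logits is a tf.Tensor
    # translator.graph_def now holds the frozen GraphDef with training nodes removed.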