Python tensorflow.python.util.compat.as_str_any() Examples
The following are 30 code examples of tensorflow.python.util.compat.as_str_any().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
tensorflow.python.util.compat, or try the search function.
Example #1
Source File: session_ops.py From lambda-packs with MIT License | 6 votes |
def __init__(self, handle, dtype, session):
    """Construct a handle wrapper for a persistent tensor.

    A tensor handle for a persistent tensor is a Python string of the
    form "tensor_name;unique_id;device_name".

    Args:
      handle: A tensor handle.
      dtype: The data type of the tensor represented by `handle`.
      session: The session in which the tensor is produced.
    """
    # Normalize the raw handle (bytes or str) to a Python string.
    self._handle = compat.as_str_any(handle)
    self._dtype = dtype
    self._session = session
    # Lazily-resolved resource handle; populated on first use.
    self._resource_handle = None
    # Garbage collection of the remote tensor is on by default.
    self._auto_gc_enabled = True
Example #2
Source File: summary.py From lambda-packs with MIT License | 6 votes |
def get_summary_description(node_def):
    """Retrieve the SummaryDescription stored in a TensorSummary node_def.

    When a Summary op is instantiated, a SummaryDescription of associated
    metadata is serialized as JSON into its NodeDef attributes; this
    function parses it back out.

    Args:
      node_def: the node_def_pb2.NodeDef of a TensorSummary op.

    Returns:
      A summary_pb2.SummaryDescription.

    Raises:
      ValueError: if the node is not a summary op.
    """
    if node_def.op != 'TensorSummary':
        raise ValueError("Can't get_summary_description on %s" % node_def.op)
    summary_description = SummaryDescription()
    # The attribute holds JSON bytes; decode then parse into the proto.
    _json_format.Parse(
        _compat.as_str_any(node_def.attr['description'].s),
        summary_description)
    return summary_description
Example #3
Source File: file_io.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 6 votes |
def list_directory(dirname):
    """Return the entries contained in a directory, in arbitrary order.

    The special entries "." and ".." are not included.

    Args:
      dirname: string, path to a directory.

    Returns:
      [filename1, filename2, ... filenameN] as strings.

    Raises:
      errors.NotFoundError: if the directory doesn't exist.
    """
    if not is_directory(dirname):
        raise errors.NotFoundError(None, None, "Could not find directory")
    with errors.raise_exception_on_not_ok_status() as status:
        children = pywrap_tensorflow.GetChildren(
            compat.as_bytes(dirname), status)
        # The C++ layer returns bytes; surface the entries as strings.
        return [compat.as_str_any(child) for child in children]
Example #4
Source File: file_io.py From keras-lambda with MIT License | 6 votes |
def get_matching_files(filename):
    """Return a list of files that match the given pattern.

    Args:
      filename: string, the pattern.

    Returns:
      A list of strings containing filenames that match the pattern.

    Raises:
      errors.OpError: If there are filesystem / directory listing errors.
    """
    with errors.raise_exception_on_not_ok_status() as status:
        matches = pywrap_tensorflow.GetMatchingFiles(
            compat.as_bytes(filename), status)
        # The C++ layer returns bytes; surface the matches as strings.
        return [compat.as_str_any(match) for match in matches]
Example #5
Source File: summary.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 6 votes |
def get_summary_description(node_def):
    """Retrieve the SummaryDescription stored in a TensorSummary node_def.

    When a Summary op is instantiated, a SummaryDescription of associated
    metadata is serialized as JSON into its NodeDef attributes; this
    function parses it back out.

    Args:
      node_def: the node_def_pb2.NodeDef of a TensorSummary op.

    Returns:
      A summary_pb2.SummaryDescription.

    Raises:
      ValueError: if the node is not a summary op.
    """
    if node_def.op != 'TensorSummary':
        raise ValueError("Can't get_summary_description on %s" % node_def.op)
    summary_description = SummaryDescription()
    # The attribute holds JSON bytes; decode then parse into the proto.
    _json_format.Parse(
        _compat.as_str_any(node_def.attr['description'].s),
        summary_description)
    return summary_description
Example #6
Source File: summary.py From keras-lambda with MIT License | 6 votes |
def get_summary_description(node_def):
    """Retrieve the SummaryDescription stored in a TensorSummary node_def.

    When a Summary op is instantiated, a SummaryDescription of associated
    metadata is serialized as JSON into its NodeDef attributes; this
    function parses it back out.

    Args:
      node_def: the node_def_pb2.NodeDef of a TensorSummary op.

    Returns:
      A summary_pb2.SummaryDescription.

    Raises:
      ValueError: if the node is not a summary op.
    """
    if node_def.op != 'TensorSummary':
        raise ValueError("Can't get_summary_description on %s" % node_def.op)
    summary_description = SummaryDescription()
    # The attribute holds JSON bytes; decode then parse into the proto.
    _json_format.Parse(
        _compat.as_str_any(node_def.attr['description'].s),
        summary_description)
    return summary_description
Example #7
Source File: _layers_common.py From tf-coreml with Apache License 2.0 | 6 votes |
def identity(op, context, input_name=None, input_id=0):
    """Translate an identity-like op.

    Network outputs get an explicit LINEAR (scale=1, bias=0) activation so
    the output name exists in the CoreML graph; everything else is skipped.
    Outputs fed by a Const input are only marked as translated.
    """
    # True when any output of this op is a network output.
    is_network_output = any(
        out.name in context.output_names for out in op.outputs)
    if input_name is None:
        input_name = compat.as_str_any(op.inputs[input_id].name)
    for out in op.outputs:
        output_name = compat.as_str_any(out.name)
        if op.inputs[input_id].op.type != 'Const':
            if is_network_output:
                context.builder.add_activation(
                    output_name, 'LINEAR', input_name, output_name, [1.0, 0])
            else:
                skip(op, context)
        context.translated[output_name] = True
Example #8
Source File: file_io.py From auto-alt-text-lambda-api with MIT License | 6 votes |
def get_matching_files(filename):
    """Return a list of files that match the given pattern.

    Args:
      filename: string, the pattern.

    Returns:
      A list of strings containing filenames that match the pattern.

    Raises:
      errors.OpError: If there are filesystem / directory listing errors.
    """
    with errors.raise_exception_on_not_ok_status() as status:
        matches = pywrap_tensorflow.GetMatchingFiles(
            compat.as_bytes(filename), status)
        # The C++ layer returns bytes; surface the matches as strings.
        return [compat.as_str_any(match) for match in matches]
Example #9
Source File: summary.py From auto-alt-text-lambda-api with MIT License | 6 votes |
def get_summary_description(node_def):
    """Retrieve the SummaryDescription stored in a TensorSummary node_def.

    When a Summary op is instantiated, a SummaryDescription of associated
    metadata is serialized as JSON into its NodeDef attributes; this
    function parses it back out.

    Args:
      node_def: the node_def_pb2.NodeDef of a TensorSummary op.

    Returns:
      A summary_pb2.SummaryDescription.

    Raises:
      ValueError: if the node is not a summary op.
    """
    if node_def.op != 'TensorSummary':
        raise ValueError("Can't get_summary_description on %s" % node_def.op)
    summary_description = SummaryDescription()
    # The attribute holds JSON bytes; decode then parse into the proto.
    _json_format.Parse(
        _compat.as_str_any(node_def.attr['description'].s),
        summary_description)
    return summary_description
Example #10
Source File: session_ops.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 6 votes |
def __init__(self, handle, dtype, session):
    """Construct a handle wrapper for a persistent tensor.

    A tensor handle for a persistent tensor is a Python string of the
    form "tensor_name;unique_id;device_name".

    Args:
      handle: A tensor handle.
      dtype: The data type of the tensor represented by `handle`.
      session: The session in which the tensor is produced.
    """
    # Normalize the raw handle (bytes or str) to a Python string.
    self._handle = compat.as_str_any(handle)
    self._dtype = dtype
    self._session = session
    # Lazily-resolved resource handle; populated on first use.
    self._resource_handle = None
    # Garbage collection of the remote tensor is on by default.
    self._auto_gc_enabled = True
Example #11
Source File: training.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 6 votes |
def _export_eval_result(self, eval_result, checkpoint_path, is_the_final_export):
    """Export `eval_result` according to exporters in `EvalSpec`.

    Each exporter writes under <model_dir>/export/<exporter.name>.
    """
    export_dir_base = os.path.join(
        compat.as_str_any(self._estimator.model_dir),
        compat.as_str_any('export'))
    for exporter in self._eval_spec.exporters:
        export_path = os.path.join(
            compat.as_str_any(export_dir_base),
            compat.as_str_any(exporter.name))
        exporter.export(
            estimator=self._estimator,
            export_path=export_path,
            checkpoint_path=checkpoint_path,
            eval_result=eval_result,
            is_the_final_export=is_the_final_export)
Example #12
Source File: summary.py From deep_image_model with Apache License 2.0 | 6 votes |
def get_summary_description(node_def):
    """Retrieve the SummaryDescription stored in a TensorSummary node_def.

    When a Summary op is instantiated, a SummaryDescription of associated
    metadata is serialized as JSON into its NodeDef attributes; this
    function parses it back out.

    Args:
      node_def: the node_def_pb2.NodeDef of a TensorSummary op.

    Returns:
      A summary_pb2.SummaryDescription.

    Raises:
      ValueError: if the node is not a summary op.
    """
    if node_def.op != 'TensorSummary':
        raise ValueError("Can't get_summary_description on %s" % node_def.op)
    summary_description = SummaryDescription()
    # The attribute holds JSON bytes; decode then parse into the proto.
    _json_format.Parse(
        _compat.as_str_any(node_def.attr['description'].s),
        summary_description)
    return summary_description
Example #13
Source File: _layers.py From tf-coreml with Apache License 2.0 | 6 votes |
def lrn(op, context):
    """Translate a local-response-normalization op to a CoreML LRN layer.

    CoreML's alpha is per-channel-window, so TF's alpha is scaled by the
    channel count of the input.
    """
    input_name = make_tensor(op.inputs[0], context)
    output_name = compat.as_str_any(op.outputs[0].name)
    # Channel count is the last axis of the input shape.
    channels = context.shape_dict[input_name][-1]
    context.builder.add_lrn(
        output_name, input_name, output_name,
        alpha=op.get_attr('alpha') * channels,
        beta=op.get_attr('beta'),
        local_size=op.get_attr('depth_radius'),
        k=op.get_attr('bias'))
    context.translated[output_name] = True
Example #14
Source File: _layers.py From tf-coreml with Apache License 2.0 | 6 votes |
def one_hot(op, context):
    """Translate a one-hot op as an embedding with an identity-like matrix.

    Row i of the weight matrix is `off_value` everywhere except `on_value`
    at column i, so looking up index i yields the one-hot row.
    """
    input_name = compat.as_str_any(op.inputs[0].name)
    output_name = compat.as_str_any(op.outputs[0].name)
    consts = context.consts
    depth = consts[compat.as_str_any(op.inputs[1].name)]
    on_value = consts[compat.as_str_any(op.inputs[2].name)]
    off_value = consts[compat.as_str_any(op.inputs[3].name)]
    weights = np.ones((depth, depth)) * off_value
    np.fill_diagonal(weights, on_value)
    context.builder.add_embedding(name=output_name,
                                  W=weights,
                                  b=None,
                                  input_dim=depth,
                                  output_channels=depth,
                                  has_bias=False,
                                  input_name=input_name,
                                  output_name=output_name)
    context.translated[output_name] = True
Example #15
Source File: _layers.py From tf-coreml with Apache License 2.0 | 6 votes |
def relu6(op, context):
    """Translate ReLU6 as relu -> negate -> threshold at -6 -> negate back.

    CoreML has no direct clip-at-6 activation, so min(relu(x), 6) is built
    as -max(-relu(x), -6).
    """
    input_name = make_tensor(op.inputs[0], context)
    output_name = compat.as_str_any(op.outputs[0].name)
    relu_name = 'relu_' + output_name
    neg_name = relu_name + '_neg'
    clip_name = relu_name + '_clip'
    builder = context.builder
    builder.add_activation(relu_name, 'RELU', input_name, relu_name)
    # Negate so the upper clip at 6 becomes a lower threshold at -6.
    builder.add_activation(neg_name, 'LINEAR', relu_name, neg_name, [-1.0, 0])
    builder.add_unary(clip_name, neg_name, clip_name, 'threshold', alpha=-6.0)
    # Negate back to recover min(relu(x), 6).
    builder.add_activation(output_name, 'LINEAR', clip_name, output_name,
                           [-1.0, 0])
    context.translated[output_name] = True
Example #16
Source File: _layers.py From tf-coreml with Apache License 2.0 | 6 votes |
def product(op, context):
    """Translate a Prod reduction to a CoreML reduce layer (mode 'prod').

    Only the full reduction of a 1-D tensor (start index 0) is supported.
    """
    input_name = make_tensor(op.inputs[0], context)
    output_name = compat.as_str_any(op.outputs[0].name)
    start_ind = context.consts[op.inputs[1].name]
    assert start_ind == 0, 'Prod: only start index = 0 case supported'
    input_shape = context.shape_dict[input_name]
    if len(input_shape) == 1:
        axis = 'C'
    else:
        assert False, 'Reduce Sum axis case not handled currently'
    context.translated[output_name] = True
    context.builder.add_reduce(output_name, input_name, output_name, axis,
                               'prod')
Example #17
Source File: session_ops.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License | 5 votes |
def _get_device_name(handle):
    """Return the device name encoded in the handle."""
    # The device name is the last ';'-separated field of the handle string.
    device = compat.as_str_any(handle).split(";")[-1]
    return pydev.canonical_name(device)
Example #18
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def random(op, context):
    """Stub translation for random ops: emits an all-zero constant.

    TODO - CoreML does not have random, so the output is replaced with a
    constant zero tensor of the expected shape.
    """
    output_name = compat.as_str_any(op.outputs[0].name)
    shape = context.shape_dict[output_name]
    add_const(context, output_name, np.zeros(shape), output_name)
    context.translated[output_name] = True
Example #19
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def minimum(op, context):
    """Translate element-wise minimum to a CoreML 'MIN' elementwise layer."""
    input_names = [compat.as_str_any(t.name) for t in op.inputs]
    output_name = compat.as_str_any(op.outputs[0].name)
    output_shape = context.shape_dict[output_name]
    # Materialize constant inputs, broadcast to the output shape.
    for name in input_names:
        if name in context.consts:
            value = np.broadcast_to(context.consts[name], output_shape)
            add_const(context, name, value, name)
    context.builder.add_elementwise(output_name, input_names, output_name,
                                    'MIN')
    context.translated[output_name] = True
Example #20
Source File: session_ops.py From keras-lambda with MIT License | 5 votes |
def _get_device_name(handle):
    """Return the device name encoded in the handle."""
    # The device name is the last ';'-separated field of the handle string.
    device = compat.as_str_any(handle).split(";")[-1]
    return pydev.canonical_name(device)
Example #21
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def maximum(op, context):
    """Translate element-wise maximum to a CoreML 'MAX' elementwise layer."""
    input_names = [compat.as_str_any(t.name) for t in op.inputs]
    output_name = compat.as_str_any(op.outputs[0].name)
    output_shape = context.shape_dict[output_name]
    # Materialize constant inputs, broadcast to the output shape.
    for name in input_names:
        if name in context.consts:
            value = np.broadcast_to(context.consts[name], output_shape)
            add_const(context, name, value, name)
    context.builder.add_elementwise(output_name, input_names, output_name,
                                    'MAX')
    context.translated[output_name] = True
Example #22
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def real_div(op, context):
    """Translate element-wise division via the add_tensor_div helper."""
    output_name = compat.as_str_any(op.outputs[0].name)
    input_names = [make_tensor(inp, context) for inp in op.inputs]
    add_tensor_div(context.builder, output_name,
                   input_names[0], input_names[1], output_name)
    context.translated[output_name] = True
Example #23
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def sigmoid(op, context):
    """Translate sigmoid to a CoreML 'SIGMOID' activation layer."""
    input_name = make_tensor(op.inputs[0], context)
    output_name = compat.as_str_any(op.outputs[0].name)
    context.builder.add_activation(output_name, 'SIGMOID', input_name,
                                   output_name)
    context.translated[output_name] = True
Example #24
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def resize_bilinear(op, context):
    """Translate ResizeBilinear to a CoreML upsample or resize layer.

    Integer upscaling factors without align_corners map to a BILINEAR
    upsample layer; everything else uses add_resize_bilinear (spec v3).
    """
    input_name = compat.as_str_any(op.inputs[0].name)
    output_name = compat.as_str_any(op.outputs[0].name)
    # Target (height, width) comes either from a graph constant or by
    # evaluating the size tensor in the session.
    if op.inputs[1].name in context.consts:
        output_spatial_sizes = context.consts[op.inputs[1].name]
    else:
        output_spatial_sizes = context.session.run(
            op.inputs[1].name, feed_dict=context.input_feed_dict)
    shape = context.shape_dict[input_name]
    assert len(shape) == 4, (
        'Resize Bilinear: input must be 4-D shape. Input shape = {}'.format(
            str(shape)))
    if op.get_attr('align_corners'):
        mode = 'STRICT_ALIGN_ENDPOINTS_MODE'
    else:
        mode = 'UPSAMPLE_MODE'
    divides_evenly = (output_spatial_sizes[0] % shape[1] == 0 and
                      output_spatial_sizes[1] % shape[2] == 0)
    if mode == 'UPSAMPLE_MODE' and divides_evenly:
        context.builder.add_upsample(
            output_name,
            output_spatial_sizes[0] // shape[1],
            output_spatial_sizes[1] // shape[2],
            input_name, output_name, mode='BILINEAR')
    else:
        context.builder.add_resize_bilinear(
            output_name, input_name, output_name,
            target_height=output_spatial_sizes[0],
            target_width=output_spatial_sizes[1],
            mode=mode)
        # NOTE(review): resize_bilinear requires CoreML spec version 3 —
        # the version bump is applied here, on the non-upsample path.
        context.builder.spec.specificationVersion = 3
    context.translated[output_name] = True
Example #25
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def resize_nearest_neighbor(op, context):
    """Translate ResizeNearestNeighbor to a CoreML 'NN' upsample layer.

    Both spatial upsampling factors must be integers; otherwise CoreML
    cannot express the resize and an assertion fires.
    """
    input_name = compat.as_str_any(op.inputs[0].name)
    output_name = compat.as_str_any(op.outputs[0].name)
    # Target (height, width) comes either from a graph constant or by
    # evaluating the size tensor in the session.
    if op.inputs[1].name in context.consts:
        output_spatial_sizes = context.consts[op.inputs[1].name]
    else:
        output_spatial_sizes = context.session.run(
            op.inputs[1].name, feed_dict=context.input_feed_dict)
    shape = context.shape_dict[input_name]
    assert len(shape) == 4, (
        'Resize Nearest Neighbour: unrecognized 4-D shape. '
        'Input shape = {}'.format(str(shape)))
    assert output_spatial_sizes[0] % shape[1] == 0, (
        'Resize Nearest Neighbour: height upsampling factor must be an integer. '
        'Input height = {}, output height = {}, ratio = {}'.format(
            shape[1], output_spatial_sizes[0],
            output_spatial_sizes[0] / shape[1]))
    assert output_spatial_sizes[1] % shape[2] == 0, (
        'Resize Nearest Neighbour: width upsampling factor must be an integer. '
        'Input width = {}, output width = {}, ratio = {}'.format(
            shape[2], output_spatial_sizes[1],
            output_spatial_sizes[1] / shape[2]))
    context.builder.add_upsample(
        output_name,
        output_spatial_sizes[0] // shape[1],
        output_spatial_sizes[1] // shape[2],
        input_name, output_name, mode='NN')
    context.translated[output_name] = True
Example #26
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def square(op, context):
    """Translate Square as an element-wise multiply of the input by itself."""
    input_name = make_tensor(op.inputs[0], context)
    output_name = compat.as_str_any(op.outputs[0].name)
    context.builder.add_elementwise(
        output_name, [input_name, input_name], output_name, 'MULTIPLY')
    context.translated[output_name] = True
Example #27
Source File: file_io.py From keras-lambda with MIT License | 5 votes |
def walk(top, in_order=True):
    """Recursive directory tree generator for directories.

    Args:
      top: string, a Directory name.
      in_order: bool, traverse pre-order if True, post-order if False.

    Errors that happen while listing directories are ignored.

    Yields:
      3-tuples (dirname, [subdirname, ...], [filename, ...]) as strings.
    """
    top = compat.as_str_any(top)
    try:
        entries = list_directory(top)
    except errors.NotFoundError:
        # Unreadable/vanished directory: silently produce nothing.
        return
    subdirs, files = [], []
    for entry in entries:
        if is_directory(os.path.join(top, entry)):
            subdirs.append(entry)
        else:
            files.append(entry)
    here = (top, subdirs, files)
    if in_order:
        yield here
    for subdir in subdirs:
        for item in walk(os.path.join(top, subdir), in_order):
            yield item
    if not in_order:
        yield here
Example #28
Source File: _layers.py From tf-coreml with Apache License 2.0 | 5 votes |
def squared_difference(op, context):
    """Translate SquaredDifference as subtract followed by self-multiply."""
    input_name = compat.as_str_any(op.inputs[0].name)
    input2 = compat.as_str_any(op.inputs[1].name)
    output_name = compat.as_str_any(op.outputs[0].name)
    diff_name = output_name + '_difference'
    context.translated[output_name] = True
    # (a - b), then square it by multiplying the difference with itself.
    add_tensor_sub(context.builder, diff_name, input_name, input2, diff_name)
    context.builder.add_elementwise(
        output_name, [diff_name, diff_name], output_name, 'MULTIPLY')
Example #29
Source File: event_accumulator.py From keras-lambda with MIT License | 5 votes |
def IsTensorFlowEventsFile(path):
    """Check the path name to see if it is probably a TF Events file."""
    basename = compat.as_str_any(os.path.basename(path))
    return 'tfevents' in basename
Example #30
Source File: file_io.py From keras-lambda with MIT License | 5 votes |
def readline(self):
    r"""Read the next line from the file, leaving the '\n' at the end."""
    self._preread_check()
    line = self._read_buf.ReadLineAsString()
    # The buffer yields bytes; return the line as a string.
    return compat.as_str_any(line)