Python onnx.TensorProto() Examples

The following are 9 code examples of onnx.TensorProto(), taken from open-source projects. The source file, project, and license for each example are listed above its code. You may also want to check out the other available functions and classes of the onnx module.
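A TensorProto is ONNX's protobuf container for tensor data. As a minimal sketch of the round trip the examples below rely on (the array values and the name 'example' are arbitrary):

import numpy as np
import onnx
from onnx import numpy_helper

# Build a TensorProto from a NumPy array.
arr = np.arange(6, dtype=np.float32).reshape(2, 3)
tensor = numpy_helper.from_array(arr, name='example')

# Serialize to bytes (this is what input_*.pb / output_*.pb files contain) ...
data = tensor.SerializeToString()

# ... then parse the bytes back and convert to a NumPy array again.
restored = onnx.TensorProto()
restored.ParseFromString(data)
assert np.array_equal(numpy_helper.to_array(restored), arr)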
Example #1
Source File: run_onnx_util.py    From chainer-compiler with MIT License
def load_test_data(data_dir, input_names, output_names):
    inout_values = []
    for kind, names in [('input', input_names), ('output', output_names)]:
        names = list(names)
        values = []
        for pb in sorted(glob.glob(os.path.join(data_dir, '%s_*.pb' % kind))):
            with open(pb, 'rb') as f:
                tensor = onnx.TensorProto()
                tensor.ParseFromString(f.read())
            if tensor.name in names:
                name = tensor.name
                names.remove(name)
            else:
                name = names.pop(0)
            values.append((name, onnx.numpy_helper.to_array(tensor)))
        inout_values.append(values)
    return tuple(inout_values) 
Example #2
Source File: onnx_test_parser.py    From deep500 with BSD 3-Clause "New" or "Revised" License
def _load_protobuf_data(self, model_dir,
                            data_sets: List[OnnxTestData]):
        for test_data_dir in glob.glob(os.path.join(model_dir, "test_data_set*")):
            inputs = {}
            outputs = {}
            
            inputs_num = len(glob.glob(os.path.join(test_data_dir, 'input_*.pb')))
            for i in range(inputs_num):
                input_file = os.path.join(test_data_dir, 'input_{}.pb'.format(i))
                tensor = onnx.TensorProto()
                with open(input_file, 'rb') as f:
                    tensor.ParseFromString(f.read())
                inputs[tensor.name] = numpy_helper.to_array(tensor)
            ref_outputs_num = len(glob.glob(os.path.join(test_data_dir, 'output_*.pb')))
            for i in range(ref_outputs_num):
                output_file = os.path.join(test_data_dir, 'output_{}.pb'.format(i))
                tensor = onnx.TensorProto()
                with open(output_file, 'rb') as f:
                    tensor.ParseFromString(f.read())
                outputs[tensor.name] = numpy_helper.to_array(tensor)

            data_sets.append(OnnxTestData(inputs, outputs)) 
Example #3
Source File: _op_translations.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def convert_cast(node, **kwargs):
    """Map MXNet's Cast operator attributes to onnx's Cast operator
    and return the created node.
    """
    onnx = import_onnx_modules()
    name = node["name"]
    proc_nodes = kwargs["proc_nodes"]
    inputs = node["inputs"]
    dtype = node["attrs"]["dtype"]

    # dtype can only be mapped to types defined in TensorProto;
    # float32 maps to "float" and float64 to "double" in ONNX,
    # following the TensorProto mapping in https://github.com/onnx/onnx/blob/master/onnx/mapping.py
    if dtype == 'float32':
        dtype = 'float'
    elif dtype == 'float64':
        dtype = 'double'

    input_node_id = kwargs["index_lookup"][inputs[0][0]]
    input_node = proc_nodes[input_node_id].name

    node = onnx.helper.make_node(
        "Cast",
        [input_node],
        [name],
        to=getattr(onnx.TensorProto, dtype.upper()),
        name=name,
    )
    return [node] 
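The `to` attribute of the Cast node must be an integer from the TensorProto.DataType enum. As a quick sketch (not part of the project above), the string-to-enum lookup used by convert_cast can be checked directly:

import onnx

# getattr on onnx.TensorProto and the protobuf enum lookup resolve the same value.
assert getattr(onnx.TensorProto, 'FLOAT') == onnx.TensorProto.DataType.Value('FLOAT')
print(onnx.TensorProto.FLOAT, onnx.TensorProto.DOUBLE)  # 1 11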
Example #4
Source File: import_benchmarkscript.py    From deeplearning-benchmark with Apache License 2.0
def get_model_input(model_dir):
    import onnx
    from onnx import numpy_helper

    model_inputs = []
    for test_data_npz in glob.glob(
            os.path.join(model_dir, 'test_data_*.npz')):
        test_data = np.load(test_data_npz, encoding='bytes')
        model_inputs = list(test_data['inputs'])

    for test_data_dir in glob.glob(
            os.path.join(model_dir, "test_data_set*")):
        inputs_num = len(glob.glob(os.path.join(test_data_dir, 'input_*.pb')))
        for i in range(inputs_num):
            input_file = os.path.join(test_data_dir, 'input_{}.pb'.format(i))
            tensor = onnx.TensorProto()
            with open(input_file, 'rb') as f:
                tensor.ParseFromString(f.read())
            model_inputs.append(numpy_helper.to_array(tensor))

    input_shape = model_inputs[-1].shape
    # pad the inputs up to 1000 data points for the inference time test
    for _ in range(1000 - len(model_inputs)):
        model_inputs.append(np.random.randn(*input_shape))

    return model_inputs 
Example #5
Source File: test_models.py    From onnx-mxnet with Apache License 2.0
def read_pb_file(data_file):
    """ Helper function to get data from pb files"""
    tensor = onnx.TensorProto()
    with open(data_file, 'rb') as pb_file:
        tensor.ParseFromString(pb_file.read())
    return numpy_helper.to_array(tensor) 
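A hypothetical counterpart for writing such files (write_pb_file is not part of the project above; it only assumes onnx.numpy_helper):

from onnx import numpy_helper

def write_pb_file(array, data_file, name=''):
    """Helper function to dump a NumPy array to a pb file (inverse of read_pb_file)"""
    tensor = numpy_helper.from_array(array, name=name)
    with open(data_file, 'wb') as pb_file:
        pb_file.write(tensor.SerializeToString())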
Example #6
Source File: check_model.py    From OLive with MIT License
def readInputFromFile(full_path):
    t = onnx.TensorProto()
    with open(full_path, 'rb') as f:
        t.ParseFromString(f.read())
    return t


Example #7
Source File: verify.py    From onnx-fb-universe with MIT License
def equalAndThen(self, x, y, msg, k):
        """
        Helper for implementing 'requireEqual' and 'checkEqual'.  Upon failure,
        invokes continuation 'k' with the error message.
        """
        if isinstance(x, onnx.TensorProto) and isinstance(y, onnx.TensorProto):
            self.equalAndThen(x.name, y.name, msg, k)
            # Use numpy for the comparison
            t1 = onnx.numpy_helper.to_array(x)
            t2 = onnx.numpy_helper.to_array(y)
            new_msg = "{}In embedded parameter '{}'".format(colonize(msg), x.name)
            self.equalAndThen(t1, t2, new_msg, k)
        elif isinstance(x, np.ndarray) and isinstance(y, np.ndarray):
            try:
                np.testing.assert_equal(x, y)
            except AssertionError as e:
                # Report the mismatch through the continuation 'k' rather than re-raising.
                k("{}{}".format(colonize(msg, ": "), str(e).lstrip()))
        else:
            if x != y:
                # TODO: Better algorithm for lists
                sx = str(x)
                sy = str(y)
                if len(sx) > 40 or len(sy) > 40 or '\n' in sx or '\n' in sy:
                    # long form
                    l = "=" * 50
                    k("\n{}The value\n{}\n{}\n{}\n\ndoes not equal\n\n{}\n{}\n{}"
                        .format(colonize(msg, ":\n"), l, sx, l, l, sy, l))
                else:
                    k("{}{} != {}".format(colonize(msg), sx, sy)) 
Example #8
Source File: update-models-from-caffe2.py    From onnx-fb-universe with MIT License
def tensortype_to_ndarray(tensor_type):
    shape = []
    for dim in tensor_type.shape.dim:
        shape.append(dim.dim_value)
    if tensor_type.elem_type == onnx.TensorProto.FLOAT:
        dtype = np.float32
    elif tensor_type.elem_type == onnx.TensorProto.INT32:
        dtype = np.int32
    else:
        raise ValueError('unsupported elem_type: {}'.format(tensor_type.elem_type))
    array = np.random.rand(*shape).astype(dtype)
    return array 
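Note that TensorProto.DataType is a protobuf enum, so the valid element-type names (FLOAT, INT32, INT64, ...) can be listed at runtime. The snippet below is a quick check, not part of the script above:

import onnx

# List the element types TensorProto understands and look one up by name.
print(sorted(onnx.TensorProto.DataType.keys()))
print(onnx.TensorProto.DataType.Value('INT32'))  # 6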
Example #9
Source File: onnx_test.py    From SNIPER-mxnet with Apache License 2.0
def get_test_files(name):
    """Extract tar file and returns model path and input, output data"""
    tar_name = download(URLS.get(name), dirname=CURR_PATH.__str__())
    # extract tar file
    tar_path = os.path.join(CURR_PATH, tar_name)
    tar = tarfile.open(tar_path.__str__(), "r:*")
    tar.extractall(path=CURR_PATH.__str__())
    tar.close()
    data_dir = os.path.join(CURR_PATH, name)
    model_path = os.path.join(data_dir, 'model.onnx')

    inputs = []
    outputs = []
    # get test files
    for test_file in os.listdir(data_dir):
        case_dir = os.path.join(data_dir, test_file)
        # skip the non-dir files
        if not os.path.isdir(case_dir):
            continue
        input_file = os.path.join(case_dir, 'input_0.pb')
        input_tensor = TensorProto()
        with open(input_file, 'rb') as proto_file:
            input_tensor.ParseFromString(proto_file.read())
        inputs.append(numpy_helper.to_array(input_tensor))

        output_tensor = TensorProto()
        output_file = os.path.join(case_dir, 'output_0.pb')
        with open(output_file, 'rb') as proto_file:
            output_tensor.ParseFromString(proto_file.read())
        outputs.append(numpy_helper.to_array(output_tensor))

    return model_path, inputs, outputs