Python caffe.proto.caffe_pb2.LayerParameter() Examples

The following are 5 code examples of caffe.proto.caffe_pb2.LayerParameter(), taken from open-source projects. The original project and source file are noted above each example. You may also want to check out all available functions/classes of the module caffe.proto.caffe_pb2.
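LayerParameter is the protobuf message that describes a single layer of a Caffe net prototxt. As a minimal sketch of the basic API (the layer and blob names here are illustrative, not taken from the examples below), a message can be built field by field and rendered as prototxt text with google.protobuf.text_format:

from caffe.proto import caffe_pb2
from google.protobuf import text_format

layer = caffe_pb2.LayerParameter()
layer.name = 'conv1'            # hypothetical layer name
layer.type = 'Convolution'
layer.bottom.append('data')     # bottom/top are repeated string fields
layer.top.append('conv1')
layer.convolution_param.num_output = 64
print(text_format.MessageToString(layer))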
Example #1
Source File: caffe_emitter.py    From MMdnn with MIT License
def get_layer_list(self):
        try:
            from caffe.proto import caffe_pb2
            layer = caffe_pb2.LayerParameter()
            param_list = [f.name for f in layer.DESCRIPTOR.fields if f.name.endswith('_param')]
            layer_list = [type(getattr(layer, s)).__name__ for s in param_list]
            layer_list = [s[:-len('Parameter')] for s in layer_list]
            return layer_list
        except ImportError:
            return []
Example #2
Source File: MyCaffe.py    From onnx2caffe with MIT License
def param_name_dict():
    """Find out the correspondence between layer names and parameter names."""

    layer = caffe_pb2.LayerParameter()
    # get all parameter names (typically underscore case) and corresponding
    # type names (typically camel case), which contain the layer names
    # (note that not all parameters correspond to layers, but we'll ignore that)
    param_names = [f.name for f in layer.DESCRIPTOR.fields if f.name.endswith('_param')]
    param_type_names = [type(getattr(layer, s)).__name__ for s in param_names]
    # strip the final '_param' or 'Parameter'
    param_names = [s[:-len('_param')] for s in param_names]
    param_type_names = [s[:-len('Parameter')] for s in param_type_names]
    return dict(zip(param_type_names, param_names)) 
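A quick way to see what the returned dictionary contains (a sketch, assuming a working pycaffe install and param_name_dict from the example above in scope):

names = param_name_dict()
# LayerParameter.convolution_param holds a ConvolutionParameter message, so:
print(names['Convolution'])     # expected: 'convolution'
print(names['InnerProduct'])    # expected: 'inner_product'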
Example #3
Source File: MyCaffe.py    From onnx2caffe with MIT License
def _to_proto(self):
        bottom_names = []
        for inp in self.inputs:
            # inp._to_proto(layers, names, autonames)
            bottom_names.append(inp)
        layer = caffe_pb2.LayerParameter()
        layer.type = self.type_name
        layer.bottom.extend(bottom_names)

        if self.in_place:
            layer.top.extend(layer.bottom)
        else:
            for top in self.outputs:
                layer.top.append(top)
        layer.name = self.layer_name
        # print(self.type_name + "...")
        for k, v in six.iteritems(self.params):
            # special case to handle generic *params
            # print("generating "+k+"...")

            if k.endswith('param'):
                assign_proto(layer, k, v)
            else:
                try:
                    assign_proto(getattr(layer,
                        _param_names[self.type_name] + '_param'), k, v)
                except (AttributeError, KeyError):
                    assign_proto(layer, k, v)

        return layer 
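assign_proto and _param_names come from the surrounding module (modeled on caffe.net_spec; _param_names is the dictionary built by param_name_dict above). The effect being achieved can be sketched directly on the protobuf message, with illustrative names:

from caffe.proto import caffe_pb2

layer = caffe_pb2.LayerParameter()
layer.name = 'ip1'              # illustrative, not from the project
layer.type = 'InnerProduct'
layer.bottom.append('data')
layer.top.append('ip1')
# assign_proto(getattr(layer, 'inner_product_param'), 'num_output', 10)
# boils down to setting the nested field directly:
layer.inner_product_param.num_output = 10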
Example #4
Source File: layerwise.py    From nideep with BSD 2-Clause "Simplified" License
def stack(net, level):

    n = pu.copy_net_params(net)

    enc_prev = None
    dec_prev = None
    enc = None
    dec = None
    for l in n.layer:
        if l.name.lower().endswith('encode%03dneuron' % (level - 1)):
            enc = pu.copy_msg(l, LayerParameter)

            for b in list(enc.bottom):
                l.bottom.remove(b)
            for t in list(l.top):
                l.bottom.append(unicode(t))  # preserve order of layer bottoms, label as bottom has to come last

        elif l.name.lower().endswith('decode%03dneuron' % (level - 1)):
            dec_prev = l
            dec = pu.copy_msg(l, LayerParameter)

    enc.name = 'encode%03dneuron' % level
    dec.name = 'decode%03dneuron' % level


    return n 
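pu here is nideep's protobuf utility module; copy_msg presumably returns an independent copy of a message, which could be sketched with protobuf's CopyFrom:

from caffe.proto.caffe_pb2 import LayerParameter

def copy_msg(msg, msg_type):
    # Sketch of what pu.copy_msg is assumed to do: build a fresh message
    # of the given type and copy every set field from the source.
    dst = msg_type()
    dst.CopyFrom(msg)
    return dst

enc = copy_msg(l, LayerParameter)   # as used in the example above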
Example #5
Source File: convert_to_fully_conv.py    From mix-and-match with MIT License
def main(args):
    caffe.set_mode_cpu()
    fc_net = caffe.Net(args.model, args.weights, caffe.TEST)
    # make fully conv prototxt
    fc_proto = caffe_pb2.NetParameter()
    with open(args.model, 'r') as f:
        pb.text_format.Parse(f.read(), fc_proto)
    layers = []
    fc_to_conv_dic = {}
    for layer in fc_proto.layer:
        if layer.type != 'InnerProduct':
            layers.append(layer)
            continue
        new_ = caffe_pb2.LayerParameter()
        new_.name = layer.name + '_conv'
        fc_to_conv_dic[layer.name] = new_.name
        new_.type = 'Convolution'
        new_.bottom.extend(layer.bottom)
        new_.top.extend(layer.top)
        new_.convolution_param.num_output = layer.inner_product_param.num_output
        bottom_shape = fc_net.blobs[layer.bottom[0]].data.shape
        if len(bottom_shape) == 4:
            new_.convolution_param.kernel_h = bottom_shape[2]
            new_.convolution_param.kernel_w = bottom_shape[3]
        else:
            new_.convolution_param.kernel_size = 1
        layers.append(new_)
    conv_proto = caffe_pb2.NetParameter()
    conv_proto.CopyFrom(fc_proto)
    del(conv_proto.layer[:])
    conv_proto.layer.extend(layers)
    if args.save_model is None:
        name, ext = osp.splitext(args.model)
        args.save_model = name + '_fully_conv' + ext
    with open(args.save_model, 'w') as f:
        f.write(pb.text_format.MessageToString(conv_proto))
    # make fully conv parameters
    conv_net = caffe.Net(args.save_model, args.weights, caffe.TEST)
    for fc, conv in fc_to_conv_dic.iteritems():
        conv_net.params[conv][0].data.flat = fc_net.params[fc][0].data.flat
        conv_net.params[conv][1].data[...] = fc_net.params[fc][1].data
    if args.save_weights is None:
        name, ext = osp.splitext(args.weights)
        args.save_weights = name + '_fully_conv' + ext
    conv_net.save(args.save_weights)
    print args.model, args.weights
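A quick sanity check on the conversion (a sketch only; fc_net and conv_net are the nets built above, and 'data' is a placeholder for whatever input blob the model actually defines) is to push the same input through both nets and compare outputs:

import numpy as np

data = np.random.rand(*fc_net.blobs['data'].data.shape).astype(np.float32)
fc_net.blobs['data'].data[...] = data
conv_net.blobs['data'].data[...] = data
fc_out = fc_net.forward()
conv_out = conv_net.forward()
# With the kernel sized to the FC layer's input, the convolutionalized net
# should reproduce the fully connected outputs (up to a trailing 1x1 spatial dim).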