Python torch.nn.init.constant_() Examples

The following are 30 code examples of torch.nn.init.constant_(), drawn from open-source projects. The source file and project for each example are listed above its code. You may also want to check out the other functions and classes of the torch.nn.init module.
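Before the project examples, a minimal self-contained sketch of what the function does: torch.nn.init.constant_(tensor, val) fills tensor with val in place (under torch.no_grad()) and returns it. The excerpts below are taken from larger source files and generally assume imports along the lines of from torch import nn, from torch.nn import init, and from torch.nn.parameter import Parameter.

from torch import nn
from torch.nn import init

layer = nn.Linear(4, 3)
init.constant_(layer.bias, 0.0)    # fill the bias with 0.0, in place
init.constant_(layer.weight, 0.5)  # fill every weight entry with 0.5

print(layer.bias)    # tensor([0., 0., 0.], requires_grad=True)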
Example #1
Source File: gat_layers.py    From DeepInf with MIT License
def __init__(self, n_head, f_in, f_out, attn_dropout, bias=True):
        super(BatchMultiHeadGraphAttention, self).__init__()
        self.n_head = n_head
        self.w = Parameter(torch.Tensor(n_head, f_in, f_out))
        self.a_src = Parameter(torch.Tensor(n_head, f_out, 1))
        self.a_dst = Parameter(torch.Tensor(n_head, f_out, 1))

        self.leaky_relu = nn.LeakyReLU(negative_slope=0.2)
        self.softmax = nn.Softmax(dim=-1)
        self.dropout = nn.Dropout(attn_dropout)
        if bias:
            self.bias = Parameter(torch.Tensor(f_out))
            init.constant_(self.bias, 0)
        else:
            self.register_parameter('bias', None)

        init.xavier_uniform_(self.w)
        init.xavier_uniform_(self.a_src)
        init.xavier_uniform_(self.a_dst) 
Example #2
Source File: network_msrresnet.py    From KAIR with MIT License
def initialize_weights(net_l, scale=1):
    if not isinstance(net_l, list):
        net_l = [net_l]
    for net in net_l:
        for m in net.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal_(m.weight, a=0, mode='fan_in')
                m.weight.data *= scale  # for residual block
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                init.kaiming_normal_(m.weight, a=0, mode='fan_in')
                m.weight.data *= scale
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias.data, 0.0) 
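A hedged usage sketch for the helper above: in MSRResNet-style super-resolution networks, the convolutions inside the residual trunk are commonly re-initialized with a reduced scale such as 0.1 so the residual branches start near zero; the model and the recon_trunk attribute below are hypothetical stand-ins.

model = MSRResNet()                                 # hypothetical model
initialize_weights(model, scale=1)                  # default Kaiming init everywhere
initialize_weights([model.recon_trunk], scale=0.1)  # hypothetical residual trunk, scaled down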
Example #3
Source File: networks.py    From MeshCNN with MIT License
def init_weights(net, init_type, init_gain):
    def init_func(m):
        classname = m.__class__.__name__
        if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
            if init_type == 'normal':
                init.normal_(m.weight.data, 0.0, init_gain)
            elif init_type == 'xavier':
                init.xavier_normal_(m.weight.data, gain=init_gain)
            elif init_type == 'kaiming':
                init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
            elif init_type == 'orthogonal':
                init.orthogonal_(m.weight.data, gain=init_gain)
            else:
                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
        elif classname.find('BatchNorm2d') != -1:
            init.normal_(m.weight.data, 1.0, init_gain)
            init.constant_(m.bias.data, 0.0)
    net.apply(init_func) 
Example #4
Source File: outputs.py    From Parsing-R-CNN with MIT License
def __init__(self, dim_in):
        super().__init__()
        self.dim_in = dim_in
        self.cls_on = cfg.FAST_RCNN.CLS_ON
        self.reg_on = cfg.FAST_RCNN.REG_ON

        if self.cls_on:
            self.cls_score = nn.Linear(self.dim_in, cfg.MODEL.NUM_CLASSES)
            init.normal_(self.cls_score.weight, std=0.01)
            init.constant_(self.cls_score.bias, 0)
        # self.avgpool = nn.AdaptiveAvgPool2d(1)
        if self.reg_on:
            if cfg.FAST_RCNN.CLS_AGNOSTIC_BBOX_REG:  # bg and fg
                self.bbox_pred = nn.Linear(self.dim_in, 4 * 2)
            else:
                self.bbox_pred = nn.Linear(self.dim_in, 4 * cfg.MODEL.NUM_CLASSES)
            init.normal_(self.bbox_pred.weight, std=0.001)
            init.constant_(self.bbox_pred.bias, 0) 
Example #5
Source File: utils.py    From prediction-flow with MIT License
def init_weights(model):
    if isinstance(model, nn.Linear):
        if model.weight is not None:
            init.kaiming_uniform_(model.weight.data)
        if model.bias is not None:
            init.normal_(model.bias.data)
    elif isinstance(model, nn.BatchNorm1d):
        if model.weight is not None:
            init.normal_(model.weight.data, mean=1, std=0.02)
        if model.bias is not None:
            init.constant_(model.bias.data, 0)
    elif isinstance(model, nn.BatchNorm2d):
        if model.weight is not None:
            init.normal_(model.weight.data, mean=1, std=0.02)
        if model.bias is not None:
            init.constant_(model.bias.data, 0)
    elif isinstance(model, nn.BatchNorm3d):
        if model.weight is not None:
            init.normal_(model.weight.data, mean=1, std=0.02)
        if model.bias is not None:
            init.constant_(model.bias.data, 0)
    else:
        pass 
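A hedged simplification of the example above: the three BatchNorm branches are identical, so they could be collapsed through the shared _BatchNorm base class; note that this is a private PyTorch name, so treat its stability as an assumption.

from torch import nn
from torch.nn import init
from torch.nn.modules.batchnorm import _BatchNorm  # private name, assumed stable

def init_weights_compact(model):
    if isinstance(model, nn.Linear):
        if model.weight is not None:
            init.kaiming_uniform_(model.weight.data)
        if model.bias is not None:
            init.normal_(model.bias.data)
    elif isinstance(model, _BatchNorm):  # covers BatchNorm1d, BatchNorm2d and BatchNorm3d
        if model.weight is not None:
            init.normal_(model.weight.data, mean=1, std=0.02)
        if model.bias is not None:
            init.constant_(model.bias.data, 0)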
Example #6
Source File: gat_layers.py    From DeepInf with MIT License
def __init__(self, n_head, f_in, f_out, attn_dropout, bias=True):
        super(MultiHeadGraphAttention, self).__init__()
        self.n_head = n_head
        self.w = Parameter(torch.Tensor(n_head, f_in, f_out))
        self.a_src = Parameter(torch.Tensor(n_head, f_out, 1))
        self.a_dst = Parameter(torch.Tensor(n_head, f_out, 1))

        self.leaky_relu = nn.LeakyReLU(negative_slope=0.2)
        self.softmax = nn.Softmax(dim=-1)
        self.dropout = nn.Dropout(attn_dropout)

        if bias:
            self.bias = Parameter(torch.Tensor(f_out))
            init.constant_(self.bias, 0)
        else:
            self.register_parameter('bias', None)

        init.xavier_uniform_(self.w)
        init.xavier_uniform_(self.a_src)
        init.xavier_uniform_(self.a_dst) 
Example #7
Source File: CNN_GNN2018.py    From Pytorch-Networks with MIT License
def _initialize_weights_norm(self):

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.normal_(m.weight, std=0.01)
                if m.bias is not None:  # MobileNet-style conv layers are built without a bias
                    init.constant_(m.bias, 0.0)

        # the last layer of these blocks doesn't have a ReLU
        init.normal_(self.model1_1[8].weight, std=0.01)
        init.normal_(self.model1_2[8].weight, std=0.01)

        init.normal_(self.model2_1[12].weight, std=0.01)
        init.normal_(self.model3_1[12].weight, std=0.01)
        init.normal_(self.model4_1[12].weight, std=0.01)
        init.normal_(self.model5_1[12].weight, std=0.01)
        init.normal_(self.model6_1[12].weight, std=0.01)

        init.normal_(self.model2_2[12].weight, std=0.01)
        init.normal_(self.model3_2[12].weight, std=0.01)
        init.normal_(self.model4_2[12].weight, std=0.01)
        init.normal_(self.model5_2[12].weight, std=0.01)
        init.normal_(self.model6_2[12].weight, std=0.01) 
Example #8
Source File: module_util.py    From BasicSR with Apache License 2.0
def initialize_weights(net_l, scale=1):
    if not isinstance(net_l, list):
        net_l = [net_l]
    for net in net_l:
        for m in net.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal_(m.weight, a=0, mode='fan_in')
                m.weight.data *= scale  # for residual block
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                init.kaiming_normal_(m.weight, a=0, mode='fan_in')
                m.weight.data *= scale
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias.data, 0.0) 
Example #9
Source File: GNNlikeCNN2015.py    From Pytorch-Networks with MIT License
def _initialize_weights_norm(self):

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.normal_(m.weight, std=0.01)
                if m.bias is not None:  # MobileNet-style conv layers are built without a bias
                    init.constant_(m.bias, 0.0)

        # the last layer of these blocks doesn't have a ReLU
        init.normal_(self.model1_1[8].weight, std=0.01)
        init.normal_(self.model1_2[8].weight, std=0.01)

        init.normal_(self.model2_1[12].weight, std=0.01)
        init.normal_(self.model3_1[12].weight, std=0.01)
        init.normal_(self.model4_1[12].weight, std=0.01)
        init.normal_(self.model5_1[12].weight, std=0.01)
        init.normal_(self.model6_1[12].weight, std=0.01)

        init.normal_(self.model2_2[12].weight, std=0.01)
        init.normal_(self.model3_2[12].weight, std=0.01)
        init.normal_(self.model4_2[12].weight, std=0.01)
        init.normal_(self.model5_2[12].weight, std=0.01)
        init.normal_(self.model6_2[12].weight, std=0.01) 
Example #10
Source File: spnasnet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #11
Source File: rir_cifar.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #12
Source File: diapreresnet_cifar.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #13
Source File: bagnet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #14
Source File: condensenet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0)
            elif isinstance(module, nn.BatchNorm2d):
                init.constant_(module.weight, 1)
                init.constant_(module.bias, 0)
            elif isinstance(module, nn.Linear):
                init.constant_(module.bias, 0)  # only the bias is zeroed; the Linear weight keeps its default init
Example #15
Source File: preresnet_cifar.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #16
Source File: polynet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #17
Source File: inceptionv4.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #18
Source File: msdnet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #19
Source File: rnn.py    From PySyft with Apache License 2.0
def reset_parameters(self):
        super(LSTMCell, self).reset_parameters()

        # The bias of the forget gate should be initialized with 1 or 2
        # Ref: http://proceedings.mlr.press/v37/jozefowicz15.pdf
        incr_bias = 1.0 / self.hidden_size
        init.constant_(self.fc_xh.bias[self.hidden_size : 2 * self.hidden_size], incr_bias)
        init.constant_(self.fc_hh.bias[self.hidden_size : 2 * self.hidden_size], incr_bias) 
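The same idea carries over to the stock torch.nn.LSTM: its packed bias vectors follow the (input, forget, cell, output) gate ordering, so the forget-gate slice is [hidden_size : 2 * hidden_size]. A minimal sketch:

from torch import nn
from torch.nn import init

hidden_size = 16
lstm = nn.LSTM(input_size=8, hidden_size=hidden_size)

# bias_ih_l0 and bias_hh_l0 each have shape (4 * hidden_size,);
# the second quarter of each belongs to the forget gate.
for name, param in lstm.named_parameters():
    if name.startswith('bias'):
        init.constant_(param[hidden_size:2 * hidden_size], 1.0)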
Example #20
Source File: mobilenetv2.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #21
Source File: seresnet_cifar.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #22
Source File: ibnresnet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #23
Source File: squeezenet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                if 'final_conv' in name:
                    init.normal_(module.weight, mean=0.0, std=0.01)
                else:
                    init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #24
Source File: resnext.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #25
Source File: sknet.py    From imgclsmob with MIT License
def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0) 
Example #26
Source File: common_utils.py    From conditional-motion-propagation with MIT License
def init_weights(net, init_type='normal', init_gain=0.02):
    """Initialize network weights.
    Parameters:
        net (network)   -- network to be initialized
        init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal
        init_gain (float)    -- scaling factor for normal, xavier and orthogonal.
    We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might
    work better for some applications. Feel free to try yourself.
    """
    def init_func(m):  # define the initialization function
        classname = m.__class__.__name__
        if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
            if init_type == 'normal':
                init.normal_(m.weight.data, 0.0, init_gain)
            elif init_type == 'xavier':
                init.xavier_normal_(m.weight.data, gain=init_gain)
            elif init_type == 'kaiming':
                init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
            elif init_type == 'orthogonal':
                init.orthogonal_(m.weight.data, gain=init_gain)
            else:
                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
            if hasattr(m, 'bias') and m.bias is not None:
                init.constant_(m.bias.data, 0.0)
        elif classname.find('BatchNorm2d') != -1:  # BatchNorm Layer's weight is not a matrix; only normal distribution applies.
            init.normal_(m.weight.data, 1.0, init_gain)
            init.constant_(m.bias.data, 0.0)

    net.apply(init_func)  # apply the initialization function <init_func> 
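A minimal usage sketch for the helper above; the model is a stand-in:

model = nn.Sequential(
    nn.Conv2d(3, 16, kernel_size=3, padding=1),
    nn.BatchNorm2d(16),
    nn.ReLU(),
)
init_weights(model, init_type='xavier', init_gain=0.02)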
Example #27
Source File: __init__.py    From sunets with MIT License
def init_params(net):
    '''Init layer parameters.'''
    for m in net.modules():
        if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
            init.kaiming_normal_(m.weight, mode='fan_out')
            if m.bias is not None:  # guard against layers built with bias=False
                init.constant_(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant_(m.weight, 1)
            init.constant_(m.bias, 0)
        elif isinstance(m, nn.Linear):
            if m.bias is not None:
                init.constant_(m.bias, 0) 
Example #28
Source File: pix2pix.py    From ncsn with GNU General Public License v3.0
def init_weights(net, init_type='normal', init_gain=0.02):
    """Initialize network weights.

    Parameters:
        net (network)   -- network to be initialized
        init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal
        init_gain (float)    -- scaling factor for normal, xavier and orthogonal.

    We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might
    work better for some applications. Feel free to try yourself.
    """

    def init_func(m):  # define the initialization function
        classname = m.__class__.__name__
        if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
            if init_type == 'normal':
                init.normal_(m.weight.data, 0.0, init_gain)
            elif init_type == 'xavier':
                init.xavier_normal_(m.weight.data, gain=init_gain)
            elif init_type == 'kaiming':
                init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
            elif init_type == 'orthogonal':
                init.orthogonal_(m.weight.data, gain=init_gain)
            else:
                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
            if hasattr(m, 'bias') and m.bias is not None:
                init.constant_(m.bias.data, 0.0)
        elif classname.find('BatchNorm2d') != -1:  # BatchNorm Layer's weight is not a matrix; only normal distribution applies.
            init.normal_(m.weight.data, 1.0, init_gain)
            init.constant_(m.bias.data, 0.0)

    print('initialize network with %s' % init_type)
    net.apply(init_func)  # apply the initialization function <init_func> 
Example #29
Source File: networks.py    From viton-gan with MIT License
def weights_init_normal(m):
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('Linear') != -1:
        init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm2d') != -1:
        init.normal_(m.weight.data, 1.0, 0.02)
        init.constant_(m.bias.data, 0.0) 
Example #30
Source File: utils.py    From BigGAN-pytorch with Apache License 2.0
def weights_init(m):
    classname = m.__class__.__name__
    # print(classname)
    if classname.find('Conv2d') != -1:
        init.xavier_normal_(m.weight.data)
        if m.bias is not None:  # some conv layers are created with bias=False
            init.constant_(m.bias.data, 0.0)
    elif classname.find('Linear') != -1:
        init.xavier_normal_(m.weight.data)
        if m.bias is not None:
            init.constant_(m.bias.data, 0.0)