Python chainer.initializers._get_initializer() Examples
The following are 28 code examples of chainer.initializers._get_initializer(), collected from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module chainer.initializers, or try the search function.
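A pattern worth noting before diving in: _get_initializer() normalizes whatever initializer specification the caller passed (None, a scalar, a NumPy array, or a callable) into a single callable initializer object. The sketch below is an illustration written for this page rather than code from any of the listed projects. It assumes Chainer's fallback to a default weight initializer (LeCunNormal) when given None; the scalar, array, and callable cases are exercised directly by the tests in Examples #21-#23 below.

import numpy
from chainer import initializers

# None -> the default weight initializer (assumed here to be LeCunNormal).
default_init = initializers._get_initializer(None)

# A scalar or a NumPy array -> wrapped in initializers.Constant.
const_init = initializers._get_initializer(10)
x = numpy.empty((2, 3), dtype=numpy.float32)
const_init(x)   # fills x with 10 in place

# Any other callable -> returned unchanged and later called on the array.
def fill_with_ones(arr):
    arr[...] = 1

custom_init = initializers._get_initializer(fill_with_ones)
custom_init(x)  # fills x with 1 in place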
Example #1
Source File: nets.py From text-gcn-chainer with Creative Commons Zero v1.0 Universal
def __init__(self, in_size, out_size=None, nobias=True,
             initialW=None, initial_bias=None):
    super(GraphConvolution, self).__init__()

    if out_size is None:
        in_size, out_size = None, in_size
    self.out_size = out_size

    with self.init_scope():
        if initialW is None:
            initialW = initializers.GlorotUniform()
        self.W = chainer.Parameter(initialW, (in_size, out_size))
        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = chainer.Parameter(bias_initializer, out_size)
Example #2
Source File: scale.py From chainer-stylegan with MIT License
def __init__(self, axis=1, W_shape=None, bias_term=False, bias_shape=None,
             initialW=None, initial_bias=None):
    super(Scale, self).__init__()
    self.axis = axis

    with self.init_scope():
        # Add W parameter and/or bias term.
        if W_shape is not None:
            if initialW is None:
                initialW = 1
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer, W_shape)
            if bias_term:
                self.bias = Bias(axis, W_shape, initial_bias)
        else:
            if bias_term:
                if bias_shape is None:
                    raise ValueError(
                        'bias_shape should be given if W is not '
                        'learnt parameter and bias_term is True.')
                # W is not a learnt parameter in this branch, so the bias
                # takes its own shape (bias_shape, not the absent W_shape).
                self.bias = Bias(axis, bias_shape, initial_bias)
Example #3
Source File: StatelessLSTM.py From chainer-compiler with MIT License
def _initialize_params(self):
    lateral_init = initializers._get_initializer(self.lateral_init)
    upward_init = initializers._get_initializer(self.upward_init)
    bias_init = initializers._get_initializer(self.bias_init)
    forget_bias_init = initializers._get_initializer(self.forget_bias_init)

    for i in six.moves.range(0, 4 * self.state_size, self.state_size):
        lateral_init(self.lateral.W.data[i:i + self.state_size, :])
        upward_init(self.upward.W.data[i:i + self.state_size, :])

    a, i, f, o = lstm._extract_gates(
        self.upward.b.data.reshape(1, 4 * self.state_size, 1))
    bias_init(a)
    bias_init(i)
    forget_bias_init(f)
    bias_init(o)
Example #4
Source File: condensenet.py From imgclsmob with MIT License
def __init__(self, units, in_units, drop_rate=0.5):
    super(CondenseLinear, self).__init__()
    drop_in_units = int(in_units * drop_rate)
    with self.init_scope():
        self.dense = L.Linear(
            in_size=drop_in_units,
            out_size=units)
        self.index = initializers.generate_array(
            initializer=initializers._get_initializer(0),
            shape=(drop_in_units,),
            xp=self.xp,
            dtype=np.int32)
        self.register_persistent("index")
Example #5
Source File: local_convolution_2d.py From chainer with MIT License
def __init__(self, in_channels, out_channels, in_size=None, ksize=None,
             stride=1, nobias=False, initialW=None, initial_bias=None,
             **kwargs):
    super(LocalConvolution2D, self).__init__()
    self.ksize = ksize
    self.stride = _pair(stride)
    self.nobias = nobias
    self.out_channels = out_channels

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer)

    if in_channels is not None and in_size is not None:
        self._initialize_params(in_channels, _pair(in_size))
Example #6
Source File: group_normalization.py From chainer with MIT License
def __init__(self, groups, size=None, eps=1e-5, initial_gamma=None,
             initial_beta=None):
    super(GroupNormalization, self).__init__()
    if initial_gamma is None:
        initial_gamma = 1
    if initial_beta is None:
        initial_beta = 0
    highprec_dtype = chainer.get_dtype(
        None, map_mixed16=numpy.float32)

    with self.init_scope():
        self.groups = groups
        gamma_initializer = \
            initializers._get_initializer(initial_gamma)
        gamma_initializer.dtype = highprec_dtype
        beta_initializer = \
            initializers._get_initializer(initial_beta)
        beta_initializer.dtype = highprec_dtype
        self.gamma = variable.Parameter(gamma_initializer)
        self.beta = variable.Parameter(beta_initializer)

    self.eps = eps
    if size is not None:
        self._initialize_params(size)
Example #7
Source File: lstm.py From chainer with MIT License
def _initialize_params(self):
    lateral_init = initializers._get_initializer(self.lateral_init)
    upward_init = initializers._get_initializer(self.upward_init)
    bias_init = initializers._get_initializer(self.bias_init)
    forget_bias_init = initializers._get_initializer(self.forget_bias_init)

    for i in six.moves.range(0, 4 * self.state_size, self.state_size):
        lateral_init(self.lateral.W.array[i:i + self.state_size, :])
        upward_init(self.upward.W.array[i:i + self.state_size, :])

    a, i, f, o = lstm._extract_gates(
        self.upward.b.array.reshape(1, 4 * self.state_size, 1))
    bias_init(a)
    bias_init(i)
    forget_bias_init(f)
    bias_init(o)
Example #8
Source File: simplified_dropconnect.py From chainer with MIT License
def __init__(self, in_size, out_size, ratio=.5, nobias=False,
             initialW=None, initial_bias=None):
    super(SimplifiedDropconnect, self).__init__()

    self.out_size = out_size
    self.ratio = ratio

    if initialW is None:
        initialW = initializers.HeNormal(1. / numpy.sqrt(2))

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if in_size is not None:
            self._initialize_params(in_size)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = initializers.Constant(0)
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, out_size)
Example #9
Source File: depthwise_convolution_2d.py From chainer with MIT License
def __init__(self, in_channels, channel_multiplier, ksize, stride=1,
             pad=0, nobias=False, initialW=None, initial_bias=None):
    super(DepthwiseConvolution2D, self).__init__()
    self.ksize = ksize
    self.stride = _pair(stride)
    self.pad = _pair(pad)
    self.channel_multiplier = channel_multiplier
    self.nobias = nobias

    if initialW is None:
        initialW = initializers.HeNormal(1. / numpy.sqrt(2))

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = initializers.Constant(0)
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer)

    if in_channels is not None:
        self._initialize_params(in_channels)
Example #10
Source File: bias.py From chainer-stylegan with MIT License
def __init__(self, axis=1, shape=None, initial_bias=None):
    super(Bias, self).__init__()

    # Add b parameter if given.
    if shape is not None:
        with self.init_scope():
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, shape)

    self.axis = axis
Example #11
Source File: deconvolution_2d.py From chainer with MIT License
def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
             nobias=False, outsize=None, initialW=None, initial_bias=None,
             **kwargs):
    super(Deconvolution2D, self).__init__()

    dilate, groups, = argument.parse_kwargs(
        kwargs, ('dilate', 1), ('groups', 1),
        deterministic='deterministic argument is not supported anymore. '
        'Use chainer.using_config(\'cudnn_deterministic\', value) '
        'context where value is either `True` or `False`.')

    if ksize is None:
        out_channels, ksize, in_channels = in_channels, out_channels, None

    self.ksize = ksize
    self.stride = _pair(stride)
    self.pad = _pair(pad)
    self.dilate = _pair(dilate)
    self.outsize = (None, None) if outsize is None else outsize
    self.out_channels = out_channels
    self.groups = int(groups)

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if in_channels is not None:
            self._initialize_params(in_channels)

        if nobias:
            self.b = None
        else:
            if isinstance(initial_bias, (numpy.ndarray, cuda.ndarray)):
                assert initial_bias.shape == (out_channels,)
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, out_channels)
Example #12
Source File: batch_normalization.py From chainer with MIT License
def _init_array(self, initializer, default_value, size):
    if initializer is None:
        initializer = default_value
    initializer = initializers._get_initializer(initializer)
    return initializers.generate_array(
        initializer, size, self.xp, dtype=self._highprec_dtype,
        device=self.device)
Example #13
Source File: normalize.py From chainercv with MIT License
def __init__(self, n_channel, initial=0, eps=1e-5):
    super(Normalize, self).__init__()
    self.eps = eps
    with self.init_scope():
        initializer = initializers._get_initializer(initial)
        self.scale = variable.Parameter(initializer)
        self.scale.initialize((n_channel,))
Example #14
Source File: active_batchnorm.py From voxelnet_chainer with MIT License
def __init__(self, size, decay=0.9, eps=2e-5, dtype=numpy.float32,
             use_gamma=True, use_beta=True,
             initial_gamma=None, initial_beta=None):
    super(ActiveBatchNormalization, self).__init__()
    self.avg_mean = numpy.zeros(size, dtype=dtype)
    self.register_persistent('avg_mean')
    self.avg_var = numpy.zeros(size, dtype=dtype)
    self.register_persistent('avg_var')
    self.N = 0
    self.register_persistent('N')
    self.decay = decay
    self.eps = eps

    with self.init_scope():
        if use_gamma:
            if initial_gamma is None:
                initial_gamma = 1
            initial_gamma = initializers._get_initializer(initial_gamma)
            initial_gamma.dtype = dtype
            self.gamma = variable.Parameter(initial_gamma, size)
        if use_beta:
            if initial_beta is None:
                initial_beta = 0
            initial_beta = initializers._get_initializer(initial_beta)
            initial_beta.dtype = dtype
            self.beta = variable.Parameter(initial_beta, size)
Example #15
Source File: active_bn.py From voxelnet_chainer with MIT License
def __init__(self, size, decay=0.9, eps=2e-5, dtype=numpy.float32,
             use_gamma=True, use_beta=True,
             initial_gamma=None, initial_beta=None):
    super(BatchNormalization, self).__init__()
    self.avg_mean = numpy.zeros(size, dtype=dtype)
    self.register_persistent('avg_mean')
    self.avg_var = numpy.zeros(size, dtype=dtype)
    self.register_persistent('avg_var')
    self.N = 0
    self.register_persistent('N')
    self.decay = decay
    self.eps = eps

    with self.init_scope():
        if use_gamma:
            if initial_gamma is None:
                initial_gamma = 1
            initial_gamma = initializers._get_initializer(initial_gamma)
            initial_gamma.dtype = dtype
            self.gamma = variable.Parameter(initial_gamma, size)
        if use_beta:
            if initial_beta is None:
                initial_beta = 0
            initial_beta = initializers._get_initializer(initial_beta)
            initial_beta.dtype = dtype
            self.beta = variable.Parameter(initial_beta, size)
Example #16
Source File: graph_convolution.py From chainer-graph-cnn with MIT License
def __init__(self, in_channels, out_channels, A, K, wscale=1, bias=0,
             nobias=False, initialW=None, initial_bias=None):
    super(GraphConvolution, self).__init__()

    L = graph.create_laplacian(A)

    self.K = K
    self.out_channels = out_channels
    self.wscale = wscale

    self._W_initializer = initializers._get_initializer(
        initialW, scale=wscale)

    if in_channels is None:
        self.add_uninitialized_param('W')
    else:
        self._initialize_params(in_channels)

    if nobias:
        self.b = None
    else:
        if initial_bias is None:
            initial_bias = bias
        bias_initializer = initializers._get_initializer(initial_bias)
        self.add_param('b', out_channels, initializer=bias_initializer)

    self.func = graph_convolution.GraphConvolutionFunction(L, K)
Example #17
Source File: dilated_convolution_2d.py From chainer with MIT License
def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
             dilate=1, nobias=False, initialW=None, initial_bias=None):
    super(DilatedConvolution2D, self).__init__()

    if ksize is None:
        out_channels, ksize, in_channels = in_channels, out_channels, None

    self.ksize = ksize
    self.stride = _pair(stride)
    self.pad = _pair(pad)
    self.dilate = _pair(dilate)
    self.out_channels = out_channels

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if in_channels is not None:
            self._initialize_params(in_channels)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            initial_bias = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(initial_bias, out_channels)
Example #18
Source File: deformable_convolution_2d.py From chainer with MIT License
def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
             nobias=False, initialW=None, initial_bias=None):
    super(DeformableConvolution2DSampler, self).__init__()

    self.ksize = ksize
    self.stride = _pair(stride)
    self.pad = _pair(pad)
    self.out_channels = out_channels
    self.initialW = initialW

    if initialW is None:
        initialW = constant.Zero()

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = initializers.Constant(0)
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer)

    if in_channels is not None:
        self._initialize_params(in_channels)
Example #19
Source File: convolution_nd.py From chainer with MIT License
def __init__(self, ndim, in_channels, out_channels, ksize=None, stride=1,
             pad=0, nobias=False, initialW=None, initial_bias=None,
             cover_all=False, dilate=1, groups=1):
    super(ConvolutionND, self).__init__()

    if ksize is None:
        out_channels, ksize, in_channels = \
            in_channels, out_channels, None

    self.out_channels = out_channels
    self.ksize = conv_nd.as_tuple(ksize, ndim)
    self.stride = stride
    self.pad = pad
    self.cover_all = cover_all
    self.dilate = conv_nd.as_tuple(dilate, ndim)
    self.groups = int(groups)

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)
        if in_channels is not None:
            self._initialize_params(in_channels)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            initial_bias = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(initial_bias, out_channels)
Example #20
Source File: linear.py From chainer with MIT License
def __init__(
        self,
        in_size: tp.Optional[int],
        out_size: tp.Optional[int] = None,
        nobias: bool = False,
        initialW: tp.Optional[types.InitializerSpec] = None,
        initial_bias: tp.Optional[types.InitializerSpec] = None
) -> None:
    super(Linear, self).__init__()

    if out_size is None:
        in_size, out_size = None, in_size
    self.in_size = in_size
    self.out_size = out_size

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer)  # type: variable.Variable # NOQA
        if in_size is not None:
            self._initialize_params(in_size)

        if nobias:
            self.b = None  # type: tp.Optional[variable.Variable]
        else:
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, out_size)
Example #21
Source File: test_init.py From chainer with MIT License
def test_callable(self):

    def initializer(arr):
        arr[...] = 100

    init = initializers._get_initializer(initializer)
    self.assertTrue(callable(init))

    x = numpy.empty((2, 3), dtype=numpy.int32)
    init(x)

    expected = numpy.full((2, 3), 100, dtype=numpy.int32)
    numpy.testing.assert_array_equal(x, expected)
Example #22
Source File: test_init.py From chainer with MIT License
def test_numpy_array(self):
    c = numpy.array([1, 2, 3])
    init = initializers._get_initializer(c)
    self.assertIsInstance(init, initializers.Constant)

    x = numpy.empty((3,), dtype=numpy.int32)
    init(x)

    expected = numpy.array([1, 2, 3], dtype=numpy.int32)
    numpy.testing.assert_array_equal(x, expected)
Example #23
Source File: test_init.py From chainer with MIT License
def test_scalar(self):
    init = initializers._get_initializer(10)
    self.assertIsInstance(init, initializers.Constant)

    x = numpy.empty((2, 3), dtype=numpy.int32)
    init(x)

    expected = numpy.full((2, 3), 10, dtype=numpy.int32)
    numpy.testing.assert_array_equal(x, expected)
Example #24
Source File: batch_normalization.py From chainer with MIT License
def __init__(self, size, comm, decay=0.9, eps=2e-5, dtype=None,
             use_gamma=True, use_beta=True,
             initial_gamma=None, initial_beta=None,
             communication_backend='auto'):
    chainer.utils.experimental(
        'chainermn.links.MultiNodeBatchNormalization')

    super(MultiNodeBatchNormalization, self).__init__()
    self._highprec_dtype = chainer.get_dtype(
        dtype, map_mixed16=numpy.float32)
    self.comm = comm
    self.avg_mean = numpy.zeros(size, dtype=self._highprec_dtype)
    self.register_persistent('avg_mean')
    self.avg_var = numpy.zeros(size, dtype=self._highprec_dtype)
    self.register_persistent('avg_var')
    self.N = 0
    self.register_persistent('N')
    self.decay = decay
    self.eps = eps

    self._communication_backend = \
        chainermn_batch_normalization.get_communication_backend(
            comm, communication_backend)

    with self.init_scope():
        if use_gamma:
            if initial_gamma is None:
                initial_gamma = 1
            initial_gamma = initializers._get_initializer(initial_gamma)
            initial_gamma.dtype = self._highprec_dtype
            self.gamma = variable.Parameter(initial_gamma, size)
        if use_beta:
            if initial_beta is None:
                initial_beta = 0
            initial_beta = initializers._get_initializer(initial_beta)
            initial_beta.dtype = self._highprec_dtype
            self.beta = variable.Parameter(initial_beta, size)
Example #25
Source File: condensenet.py From imgclsmob with MIT License
def __init__(self, in_channels, out_channels, ksize, stride, pad, groups):
    super(CondenseComplexConv, self).__init__()
    with self.init_scope():
        self.bn = L.BatchNormalization(size=in_channels)
        self.activ = F.relu
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=stride,
            pad=pad,
            nobias=True,
            groups=groups)
        self.c_shuffle = ChannelShuffle(
            channels=out_channels,
            groups=groups)
        self.index = initializers.generate_array(
            initializer=initializers._get_initializer(0),
            shape=(in_channels,),
            xp=self.xp,
            dtype=np.int32)
        self.register_persistent("index")
Example #26
Source File: sn_convolution_nd.py From Deep_VoiceChanger with MIT License
def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
             nobias=False, initialW=None, initial_bias=None,
             cover_all=False, use_gamma=False, Ip=1, factor=None):
    super(SNConvolutionND, self).__init__()
    ksize = conv_nd.as_tuple(ksize, ndim)
    self.stride = stride
    self.pad = pad
    self.cover_all = cover_all
    self.use_gamma = use_gamma
    self.Ip = Ip
    self.u = np.random.normal(size=(1, out_channels)).astype(dtype="f")
    self.register_persistent('u')
    self.factor = factor

    with self.init_scope():
        W_shape = (out_channels, in_channels) + ksize
        self.W = variable.Parameter(
            initializers._get_initializer(initialW), W_shape)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            initial_bias = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(initial_bias, out_channels)

        if self.use_gamma:
            W_mat = self.W.data.reshape(self.W.shape[0], -1)
            _, s, _ = np.linalg.svd(W_mat)
            self.gamma = variable.Parameter(s[0], (1,) * len(self.W.shape))
Example #27
Source File: bilinear.py From chainer with MIT License
def __init__(self, left_size, right_size, out_size, nobias=False,
             initialW=None, initial_bias=None):
    super(Bilinear, self).__init__()
    self.in_sizes = (left_size, right_size)
    self.nobias = nobias

    # TODO(Kenta OONO): I do not know appropriate way of
    # initializing weights in tensor network.
    # This initialization is a modification of
    # that of Linear function.

    with self.init_scope():
        shape = (left_size, right_size, out_size)
        if isinstance(initialW, (numpy.ndarray, cuda.ndarray)):
            assert initialW.shape == shape
        self.W = variable.Parameter(
            initializers._get_initializer(initialW), shape)

        if not self.nobias:
            V1_shape = (left_size, out_size)
            V2_shape = (right_size, out_size)
            b_shape = (out_size,)
            if isinstance(initial_bias, tuple):
                initialV1, initialV2, initialb = initial_bias
                if isinstance(initialV1, (numpy.ndarray, cuda.ndarray)):
                    assert initialV1.shape == V1_shape
                if isinstance(initialV2, (numpy.ndarray, cuda.ndarray)):
                    assert initialV2.shape == V2_shape
                if isinstance(initialb, (numpy.ndarray, cuda.ndarray)):
                    assert initialb.shape == b_shape
                initialV1 = initializers._get_initializer(initialV1)
                initialV2 = initializers._get_initializer(initialV2)
                initialb = initializers._get_initializer(initialb)
            elif initial_bias is None:
                initialV1 = initializers._get_initializer(None)
                initialV2 = initializers._get_initializer(None)
                initialb = 0
            else:
                raise ValueError('initial_bias must be tuple or None')

            self.V1 = variable.Parameter(initialV1, V1_shape)
            self.V2 = variable.Parameter(initialV2, V2_shape)
            self.b = variable.Parameter(initialb, b_shape)
Example #28
Source File: convolution_2d.py From chainer with MIT License
def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
             nobias=False, initialW=None, initial_bias=None, **kwargs):
    super(Convolution2D, self).__init__()

    dilate, groups = argument.parse_kwargs(
        kwargs, ('dilate', 1), ('groups', 1),
        deterministic='deterministic argument is not supported anymore. '
        'Use chainer.using_config(\'cudnn_deterministic\', value) '
        'context where value is either `True` or `False`.')

    if ksize is None:
        out_channels, ksize, in_channels = in_channels, out_channels, None

    self.cudnn_fast = chainer.get_compute_mode() == 'cudnn_fast'
    if self.cudnn_fast:
        x_layout = memory_layouts.CUDNN_CHANNEL_LAST_X
        w_layout = memory_layouts.CUDNN_CHANNEL_LAST_W
    else:
        x_layout = memory_layouts.CUDNN_CHANNEL_FIRST_X
        w_layout = memory_layouts.CUDNN_CHANNEL_FIRST_W

    self.ksize = ksize
    self.stride = _pair(stride)
    self.pad = _pair(pad)
    self.dilate = _pair(dilate)
    self.in_channels = in_channels
    self.out_channels = out_channels
    self.groups = int(groups)
    self.x_layout = x_layout

    with self.init_scope():
        W_initializer = initializers._get_initializer(initialW)
        self.W = variable.Parameter(W_initializer, layout=w_layout)
        if in_channels is not None:
            self._initialize_params(in_channels)

        if nobias:
            self.b = None
        else:
            if initial_bias is None:
                initial_bias = 0
            bias_initializer = initializers._get_initializer(initial_bias)
            self.b = variable.Parameter(bias_initializer, out_channels)