Python chainer.initializers.GlorotUniform() Examples
The following are 10
code examples of chainer.initializers.GlorotUniform().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module chainer.initializers, or try the search function.
Example #1
Source File: nets.py From text-gcn-chainer with Creative Commons Zero v1.0 Universal | 6 votes |
def __init__(self, in_size, out_size=None, nobias=True, initialW=None, initial_bias=None):
    """Set up the weight ``W`` and optional bias ``b`` of a graph convolution.

    When only one size is supplied it is taken as the output size, and the
    input size is left as ``None`` so Chainer infers it lazily on the first
    forward pass.
    """
    super(GraphConvolution, self).__init__()
    if out_size is None:
        # single-argument form: the given size is the output size
        in_size, out_size = None, in_size
    self.out_size = out_size

    # Default weight init is Glorot uniform; bias defaults to zeros.
    weight_init = initializers.GlorotUniform() if initialW is None else initialW
    with self.init_scope():
        self.W = chainer.Parameter(weight_init, (in_size, out_size))
        if nobias:
            self.b = None
        else:
            bias_init = initializers._get_initializer(
                0 if initial_bias is None else initial_bias)
            self.b = chainer.Parameter(bias_init, out_size)
Example #2
Source File: rnn_cells.py From knmt with GNU General Public License v3.0 | 5 votes |
def create_initializer(init_type, scale=None, fillvalue=None):
    """Return a Chainer initializer selected by name.

    Args:
        init_type (str): one of ``identity``, ``constant``, ``zero``, ``one``,
            ``normal``, ``glorotNormal``, ``heNormal``, ``orthogonal``,
            ``uniform``, ``leCunUniform``, ``glorotUniform``, ``heUniform``.
        scale: optional scale passed to the initializer; when ``None`` the
            initializer's own default scale is used.
        fillvalue: fill value for the ``constant`` initializer.

    Raises:
        ValueError: if ``init_type`` is not a known initializer name.

    Note: the original code passed ``scale`` (i.e. ``None``) explicitly in the
    ``scale is None`` branch for the orthogonal/uniform-family initializers,
    which overrides their sensible defaults (e.g. ``Uniform`` defaults to
    ``scale=0.05``). Fixed here by calling the no-argument form instead.
    """
    if init_type == 'identity':
        return initializers.Identity() if scale is None else initializers.Identity(scale=scale)
    if init_type == 'constant':
        return initializers.Constant(fillvalue)
    if init_type == 'zero':
        return initializers.Zero()
    if init_type == 'one':
        return initializers.One()
    if init_type == 'normal':
        return initializers.Normal() if scale is None else initializers.Normal(scale)
    if init_type == 'glorotNormal':
        return initializers.GlorotNormal() if scale is None else initializers.GlorotNormal(scale)
    if init_type == 'heNormal':
        return initializers.HeNormal() if scale is None else initializers.HeNormal(scale)
    if init_type == 'orthogonal':
        return initializers.Orthogonal() if scale is None else initializers.Orthogonal(scale)
    if init_type == 'uniform':
        return initializers.Uniform() if scale is None else initializers.Uniform(scale)
    if init_type == 'leCunUniform':
        return initializers.LeCunUniform() if scale is None else initializers.LeCunUniform(scale)
    if init_type == 'glorotUniform':
        return initializers.GlorotUniform() if scale is None else initializers.GlorotUniform(scale)
    if init_type == 'heUniform':
        return initializers.HeUniform() if scale is None else initializers.HeUniform(scale)
    raise ValueError("Unknown initializer type: {0}".format(init_type))
Example #3
Source File: test_link_n_step_rnn.py From chainer with MIT License | 5 votes |
def get_initializers(self):
    """Build the (weight, bias) initializer pair from the test configuration.

    ``self.initialW`` / ``self.initial_bias`` are expected to be either
    ``'zero'`` or ``'random'``; any other value leaves the corresponding
    local unbound, as in the original test helper.
    """
    def seeded_rng():
        # Fixed seed so 'random' initializers are reproducible across runs.
        return numpy.random.RandomState(seed=0)

    if self.initialW == 'zero':
        weight_initializer = initializers.constant.Zero()
    elif self.initialW == 'random':
        weight_initializer = initializers.GlorotUniform(rng=seeded_rng())

    if self.initial_bias == 'zero':
        bias_initializer = initializers.constant.Zero()
    elif self.initial_bias == 'random':
        bias_initializer = initializers.Uniform(rng=seeded_rng())

    return weight_initializer, bias_initializer
Example #4
Source File: test_crf1d.py From chainer with MIT License | 5 votes |
def setUp(self):
    """Prepare a CRF1d link whose transition-cost matrix is pre-initialized.

    The cost matrix is filled either with zeros (``initializer is None``)
    or with Glorot-uniform values (``initializer == 'random'``).
    """
    self.n_label = 3
    shape = (self.n_label, self.n_label)
    self.initial_cost = numpy.empty(shape, dtype=self.dtype)

    if self.initializer is None:
        cost_init = initializers.constant.Zero()
    elif self.initializer == 'random':
        cost_init = initializers.GlorotUniform()
    cost_init(self.initial_cost)

    with chainer.using_config('dtype', self.dtype):
        self.link = links.CRF1d(self.n_label, initial_cost=self.initial_cost)
Example #5
Source File: attention.py From models with MIT License | 5 votes |
def __init__(self, num_heads, size, dropout_ratio=0.1):
    """Multi-headed attention parameters: four equally-sized linear maps.

    ``size`` must divide evenly among the heads; each head attends over
    ``size // num_heads`` dimensions.
    """
    super().__init__()
    assert size % num_heads == 0, "model size must be divisible by the number of heads"

    self.num_heads = num_heads
    self.key_dimensionality = size // num_heads
    self.dropout_ratio = dropout_ratio
    self.attention = None  # filled in during the forward pass

    with self.init_scope():
        projection = L.Linear(size, size, initialW=initializers.GlorotUniform())
        # Q, K, V projections plus the output projection.
        self.linears = projection.repeat(4, mode='init')
Example #6
Source File: embedding.py From models with MIT License | 5 votes |
def __init__(self, size, vocab_size):
    """Token embedding table of dimension ``size`` with Glorot-uniform init."""
    super().__init__()
    self.size = size
    weight_init = initializers.GlorotUniform()
    with self.init_scope():
        self.embed = L.EmbedID(vocab_size, size, initialW=weight_init)
Example #7
Source File: position_wise_feed_forward.py From models with MIT License | 5 votes |
def __init__(self, size, ff_size=2048, dropout_ratio=0.1):
    """Position-wise feed-forward block: ``size -> ff_size -> size``."""
    super().__init__()
    self.dropout_ratio = dropout_ratio
    with self.init_scope():
        # Two linear layers, each with its own Glorot-uniform initializer.
        for attr, (n_in, n_out) in (('l1', (size, ff_size)),
                                    ('l2', (ff_size, size))):
            setattr(self, attr,
                    L.Linear(n_in, n_out, initialW=initializers.GlorotUniform()))
Example #8
Source File: attention.py From kiss with GNU General Public License v3.0 | 5 votes |
def __init__(self, num_heads, size, dropout_ratio=0.1):
    """Multi-headed attention parameters: four equally-sized linear maps.

    Args:
        num_heads (int): number of attention heads; must divide ``size``.
        size (int): model dimensionality.
        dropout_ratio (float): dropout applied to attention weights.

    Fix: the assertion message misspelled "divisible" as "divisable".
    """
    super().__init__()
    assert size % num_heads == 0, "model size must be divisible by the number of heads"
    self.key_dimensionality = size // num_heads
    self.num_heads = num_heads
    self.attention = None  # filled in during the forward pass
    self.dropout_ratio = dropout_ratio
    with self.init_scope():
        # Q, K, V projections plus the output projection.
        self.linears = L.Linear(
            size, size, initialW=initializers.GlorotUniform()).repeat(4, mode='init')
Example #9
Source File: embedding.py From kiss with GNU General Public License v3.0 | 5 votes |
def __init__(self, size, vocab_size):
    """Token embedding table of dimension ``size`` with Glorot-uniform init."""
    super().__init__()
    self.size = size
    glorot = initializers.GlorotUniform()
    with self.init_scope():
        self.embed = L.EmbedID(vocab_size, size, initialW=glorot)
Example #10
Source File: position_wise_feed_forward.py From kiss with GNU General Public License v3.0 | 5 votes |
def __init__(self, size, ff_size=2048, dropout_ratio=0.1):
    """Position-wise feed-forward block: ``size -> ff_size -> size``."""
    super().__init__()
    self.dropout_ratio = dropout_ratio
    with self.init_scope():
        # Expansion layer followed by projection back to the model size,
        # each with an independent Glorot-uniform initializer.
        for attr, (n_in, n_out) in (('l1', (size, ff_size)),
                                    ('l2', (ff_size, size))):
            setattr(self, attr,
                    L.Linear(n_in, n_out, initialW=initializers.GlorotUniform()))