Python chainer.functions.softplus() Examples
The following are 12 code examples of chainer.functions.softplus(), collected from open-source projects; the source file and originating project are noted above each example. You may also want to check out all available functions and classes of the chainer.functions module.
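Softplus is a smooth approximation of ReLU: chainer.functions.softplus(x, beta=1.0) computes f(x) = (1/beta) * log(1 + exp(beta * x)). Its output is strictly positive, which is why the examples below use it to parameterize variances and to build numerically stable loss terms. A minimal sketch of calling it (the array values are illustrative):

import numpy as np
import chainer.functions as F

x = np.array([-2.0, 0.0, 2.0], dtype=np.float32)
y = F.softplus(x, beta=1.0)  # (1 / beta) * log(1 + exp(beta * x))
print(y.array)               # approx. [0.127 0.693 2.127]
# Strictly positive everywhere, and approaches max(0, x) for large |x|.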
Example #1
Source File: shifted_softplus.py From chainer-chemistry with MIT License
def shifted_softplus(x, beta=1, shift=0.5, threshold=20):
    """Shifted softplus function, which holds f(0)=0.

    Args:
        x (Variable): Input variable
        beta (float): Parameter :math:`\\beta`.
        shift (float): Shift parameter
        threshold (float): Threshold to avoid overflow

    Returns:
        output (Variable): Output variable whose shape is the same as `x`
    """
    xp = chainer.cuda.get_array_module(x)
    cond = chainer.as_variable(x).array > threshold
    x = functions.where(cond, x, functions.softplus(x, beta=beta))
    x += xp.log(shift)
    return x
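With the defaults beta=1 and shift=0.5, the offset log(0.5) = -log(2) exactly cancels softplus(0) = log(2), so f(0) = 0. A minimal usage sketch, assuming the chainer and functions imports used by this chainer-chemistry module:

import numpy as np

x = np.array([0.0, 1.0], dtype=np.float32)
y = shifted_softplus(x)
print(y.array)  # approx. [0.0, 0.620]; softplus(0) + log(0.5) == 0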
Example #2
Source File: policies.py From baselines with MIT License
def __init__(self, n_actions, n_input_channels=4,
             activation=F.relu, bias=0.1, var_param_init=0,
             # var_func=F.softplus,
             hiddens=None):
    self.n_input_channels = n_input_channels
    self.activation = activation
    self.hiddens = [512] if hiddens is None else hiddens
    # self.var_func = var_func
    super(ActorTRPONetForContinuous, self).__init__()
    with self.init_scope():
        self.conv_layers = chainer.ChainList(
            L.Convolution2D(n_input_channels, 32, 8, stride=4,
                            initial_bias=bias),
            L.Convolution2D(32, 64, 4, stride=2, initial_bias=bias),
            L.Convolution2D(64, 64, 3, stride=1, initial_bias=bias))
        self.a_stream = chainerrl.links.mlp.MLP(
            None, n_actions, self.hiddens)
        self.var_param = chainer.Parameter(
            initializer=var_param_init, shape=(1,))
        # self.var_param = chainer.Parameter(
        #     initializer=var_param_init, shape=(n_actions,))  # independent
Example #3
Source File: distribution.py From chainerrl with MIT License
def _tanh_forward_log_det_jacobian(x):
    """Compute log|det(dy/dx)| except summation where y=tanh(x)."""
    # For the derivation of this formula, see:
    # https://github.com/tensorflow/probability/blob/master/tensorflow_probability/python/bijectors/tanh.py  # NOQA
    return 2. * (np.log(2.) - x - F.softplus(-2. * x))
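This relies on the identity log(1 - tanh(x)^2) = 2 * (log 2 - x - softplus(-2x)), which avoids computing 1 - tanh(x)^2 directly and so stays accurate for large x. A standalone NumPy check of the identity (illustrative, not part of chainerrl):

import numpy as np

def softplus(x):
    # numerically stable log(1 + exp(x))
    return np.maximum(x, 0.) + np.log1p(np.exp(-np.abs(x)))

x = np.linspace(-3., 3., 7)
lhs = 2. * (np.log(2.) - x - softplus(-2. * x))
rhs = np.log(1. - np.tanh(x) ** 2)  # log|d tanh(x)/dx|
assert np.allclose(lhs, rhs)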
Example #4
Source File: gaussian_policy.py From chainerrl with MIT License
def compute_mean_and_var(self, x):
    h = x
    for layer in self.hidden_layers:
        h = self.nonlinearity(layer(h))
    mean = self.mean_layer(h)
    if self.bound_mean:
        mean = bound_by_tanh(mean, self.min_action, self.max_action)
    var = F.broadcast_to(F.softplus(self.var_layer(h)), mean.shape) + \
        self.min_var
    return mean, var
Example #5
Source File: gaussian_policy.py From chainerrl with MIT License
def __init__(self, n_input_channels, action_size,
             n_hidden_layers=0, n_hidden_channels=None,
             min_action=None, max_action=None, bound_mean=False,
             var_type='spherical', nonlinearity=F.relu,
             mean_wscale=1, var_func=F.softplus,
             var_param_init=0,
             ):
    self.n_input_channels = n_input_channels
    self.action_size = action_size
    self.n_hidden_layers = n_hidden_layers
    self.n_hidden_channels = n_hidden_channels
    self.min_action = min_action
    self.max_action = max_action
    self.bound_mean = bound_mean
    self.nonlinearity = nonlinearity
    self.var_func = var_func
    var_size = {'spherical': 1, 'diagonal': action_size}[var_type]

    layers = []
    layers.append(L.Linear(n_input_channels, n_hidden_channels))
    for _ in range(n_hidden_layers - 1):
        layers.append(self.nonlinearity)
        layers.append(L.Linear(n_hidden_channels, n_hidden_channels))
    layers.append(self.nonlinearity)
    # The last layer is used to compute the mean
    layers.append(
        L.Linear(n_hidden_channels, action_size,
                 initialW=LeCunNormal(mean_wscale)))

    if self.bound_mean:
        layers.append(lambda x: bound_by_tanh(
            x, self.min_action, self.max_action))

    super().__init__()
    with self.init_scope():
        self.hidden_layers = links.Sequence(*layers)
        self.var_param = chainer.Parameter(
            initializer=var_param_init, shape=(var_size,))
Example #6
Source File: gaussian_policy.py From chainerrl with MIT License
def __init__(
        self,
        action_size,
        var_type='spherical',
        var_func=F.softplus,
        var_param_init=0,
):
    self.var_func = var_func
    var_size = {'spherical': 1, 'diagonal': action_size}[var_type]
    super().__init__()
    with self.init_scope():
        self.var_param = chainer.Parameter(
            initializer=var_param_init, shape=(var_size,))
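In Examples #4 through #6, var_func=F.softplus maps an unconstrained parameter to a strictly positive variance. A hypothetical illustration of that mapping (the variable names here are ours, not from chainerrl):

import chainer
import chainer.functions as F

var_param = chainer.Parameter(initializer=0., shape=(1,))  # unconstrained
var = F.softplus(var_param)  # strictly positive variance
print(var.array)             # approx. [0.693], i.e. softplus(0) = log(2)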
Example #7
Source File: cn_agents.py From cryptotrader with MIT License
def compute_mean_and_var(self, x):
    # mean = F.relu(self.mean_layer_1(x))
    # mean = self.bn_mean(mean)
    mean = self.mean_layer_2(x)
    # var = F.relu(self.var_layer_1(x))
    # var = self.bn_var(var)
    var = F.softplus(self.var_layer_2(x))
    return mean, var
Example #8
Source File: test_softplus.py From chainer with MIT License
def forward(self, inputs, device):
    x, = inputs
    return functions.softplus(x, beta=self.beta),
Example #9
Source File: net.py From models with MIT License
def _encode(self, xs):
    exs = self.embed_mat(xs)
    h = F.tanh(self.l1(exs))
    logits = F.softplus(self.l2(h))
    logits = F.log(logits + 1e-10).reshape(-1, self.M, self.K)
    return logits, exs
Example #10
Source File: loss_functions.py From chainer-gan-experiments with MIT License
def loss_func_dcgan_dis_real(y_real):
    return F.sum(F.softplus(-y_real)) / np.prod(y_real.data.shape)
Example #11
Source File: loss_functions.py From chainer-gan-experiments with MIT License
def loss_func_dcgan_dis_fake(y_fake):
    return F.sum(F.softplus(y_fake)) / np.prod(y_fake.data.shape)
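Examples #10 and #11 rely on the identities softplus(-y) = -log(sigmoid(y)) and softplus(y) = -log(1 - sigmoid(y)), so these two functions are the standard discriminator losses -log D(real) and -log(1 - D(fake)) written in a numerically stable form. A quick NumPy check of both identities (illustrative, not from the repository):

import numpy as np

def softplus(x):
    return np.maximum(x, 0.) + np.log1p(np.exp(-np.abs(x)))

y = np.linspace(-4., 4., 9)          # discriminator logits
sig = 1. / (1. + np.exp(-y))
assert np.allclose(softplus(-y), -np.log(sig))       # real-sample term
assert np.allclose(softplus(y), -np.log(1. - sig))   # fake-sample term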
Example #12
Source File: loss_functions.py From chainer-gan-experiments with MIT License
def loss_sigmoid_cross_entropy_with_logits(x, t):
    return F.average(x - x*t + F.softplus(-x))  # / x.data.shape[0]
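Here x - x*t + F.softplus(-x) is an algebraic rearrangement of binary cross-entropy on logits, -(t*log(sigmoid(x)) + (1-t)*log(1-sigmoid(x))), computed without an explicit sigmoid. A standalone check of the equivalence (illustrative):

import numpy as np

def softplus(x):
    return np.maximum(x, 0.) + np.log1p(np.exp(-np.abs(x)))

x = np.linspace(-3., 3., 7)                  # logits
t = np.array([0., 1., 0., 1., 0., 1., 0.])   # binary targets
sig = 1. / (1. + np.exp(-x))
lhs = x - x * t + softplus(-x)
rhs = -(t * np.log(sig) + (1. - t) * np.log(1. - sig))
assert np.allclose(lhs, rhs)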