Python chainer.as_variable() Examples
The following are 30 code examples of chainer.as_variable(), drawn from open-source projects. The project and source file for each example are noted above it.
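Before the examples, a minimal sketch of the call itself may help: chainer.as_variable() wraps a raw ndarray in a Variable without copying, with requires_grad=False, while an input that is already a Variable passes through unchanged (both behaviors are exercised by Example #11 below):

import numpy as np
import chainer

x = np.zeros((2, 3), dtype=np.float32)
v = chainer.as_variable(x)
assert isinstance(v, chainer.Variable)
assert v.array is x                 # the array is wrapped, not copied
assert v.requires_grad is False     # wrapped arrays do not require grad

w = chainer.Variable(x)
assert chainer.as_variable(w) is w  # Variables pass through unchanged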
Example #1
Source File: test_variable.py From chainer with MIT License
def check_backward_accumulate(self, xp):
    inputs = self._get_inputs()
    a, b, c = [inputs[i] for i in self.var_mapping]
    y = muladd(a, b, c)
    y.grad = self.gy
    y.backward()

    inputs2 = self._get_inputs()
    a2, b2, c2 = [inputs2[i] for i in self.var_mapping]
    y2 = chainer.as_variable(a2 * b2 + c2)
    y2.grad = self.gy
    y2.backward()

    tol = {'atol': 1e-4, 'rtol': 1e-4}
    for x, x2, (isvar, _) in zip(
            inputs, inputs2, self.inputs_isvar_hasgrad):
        if isvar:
            xp.testing.assert_allclose(x.grad, x2.grad, **tol)
Example #2
Source File: fix.py From chainer with MIT License
def fix(x):
    """Elementwise fix function.

    .. math::
       y_i = \\lfix x_i \\rfix

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    if isinstance(x, chainer.variable.Variable):
        x = x.array
    xp = backend.get_array_module(x)
    return chainer.as_variable(utils.force_array(xp.fix(x), x.dtype))
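A short usage sketch (assuming numpy and chainer.functions imported as F); fix rounds each element toward zero:

import numpy as np
import chainer.functions as F

x = np.array([-1.7, -0.2, 0.2, 1.7], dtype=np.float32)
y = F.fix(x)    # truncates toward zero, elementwise
print(y.array)  # [-1. -0.  0.  1.]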
Example #3
Source File: ceil.py From chainer with MIT License
def ceil(x):
    """Elementwise ceil function.

    .. math::
       y_i = \\lceil x_i \\rceil

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.

    Returns:
        ~chainer.Variable: Output variable.
    """
    if isinstance(x, chainer.variable.Variable):
        x = x.data
    xp = backend.get_array_module(x)
    return chainer.as_variable(utils.force_array(xp.ceil(x), x.dtype))
Example #4
Source File: test_replace_func.py From onnx-chainer with MIT License
def get_model(self):
    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()
            with self.init_scope():
                self.l = L.Linear(None, 2)

        def half(self, xs, value=0.5):
            return xs * value

        def forward(self, xs):
            h = self.l(xs)
            h = self.half(h)
            return F.sum(chainer.as_variable(h))

    return Model()
Example #5
Source File: test_inout.py From onnx-chainer with MIT License
def test_hook_for_funcnode(self, test_type):
    class Model(chainer.Chain):
        def forward(self, x):
            if test_type in ['variable', 'array']:
                x = [chainer.as_variable(x)]
            elif test_type == 'dict':
                x = list(x.values())
            x.append(chainer.Variable(np.array(7, np.float32)))
            return F.stack(x)

    model = Model()
    x = self.get_x(test_type)
    with RetainInputHook() as h:
        model(x)
    expected_count = 1
    if test_type == 'array':
        # input is ndarray and not checked in forward_preprocess
        expected_count += 1
    assert len(h.retain_inputs) == expected_count
Example #6
Source File: updater.py From become-yukarin with MIT License
def forward(self, input, target, mask):
    input = chainer.as_variable(input)
    target = chainer.as_variable(target)
    mask = chainer.as_variable(mask)

    output = self.predictor(input)

    output = output * mask
    target = target * mask

    d_fake = self.discriminator(input, output)
    d_real = self.discriminator(input, target)

    loss = {
        'predictor': self._loss_predictor(self.predictor, output, target, d_fake),
        'discriminator': self._loss_discriminator(self.discriminator, d_real, d_fake),
    }
    return loss
Example #7
Source File: shifted_softplus.py From chainer-chemistry with MIT License
def shifted_softplus(x, beta=1, shift=0.5, threshold=20):
    """shifted softplus function, which holds f(0)=0.

    Args:
        x (Variable): Input variable
        beta (float): Parameter :math:`\\beta`.
        shift (float): Shift Parameter
        threshold (float): threshold to avoid overflow

    Returns:
        output (Variable): Output variable whose shape is same with `x`
    """
    xp = chainer.cuda.get_array_module(x)
    cond = chainer.as_variable(x).array > threshold
    x = functions.where(cond, x, functions.softplus(x, beta=beta))
    x += xp.log(shift)
    return x
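The added log(shift) term is what makes f(0) = 0: softplus(0) = log 2, and log 2 + log 0.5 = 0. A quick numerical check of that identity (a minimal sketch, assuming numpy and chainer.functions as F):

import numpy as np
import chainer.functions as F

x = np.zeros(1, dtype=np.float32)
y = F.softplus(x, beta=1.0) + np.float32(np.log(0.5))  # log 2 + log 0.5 == 0
print(y.array)                                         # [0.] (up to float32 rounding)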
Example #8
Source File: convolution_2d.py From chainer with MIT License
def forward(self, x):
    """Applies the convolution layer.

    Args:
        x (~chainer.Variable): Input image.

    Returns:
        ~chainer.Variable: Output of the convolution.
    """
    x = chainer.as_variable(x)
    assert x.layout == self.x_layout
    # self.W can be a Variable instead of Parameter: #8462
    # TODO(niboshi): Use Parameter.is_initialized.
    if self.W.raw_array is None:
        _, c, _, _ = memory_layouts.get_semantic_shape(
            x, assumed_layout=self.x_layout)
        self._initialize_params(c)
    return convolution_2d.convolution_2d(
        x, self.W, self.b, self.stride, self.pad, dilate=self.dilate,
        groups=self.groups, cudnn_fast=self.cudnn_fast)
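The raw_array-is-None branch is chainer's lazy weight initialization: constructing L.Convolution2D(None, out_channels, ...) defers allocating W until the first forward pass reveals the number of input channels. A minimal demonstration (a sketch):

import numpy as np
import chainer.links as L

conv = L.Convolution2D(None, 8, ksize=3)   # in_channels inferred at first call
x = np.zeros((1, 5, 16, 16), dtype=np.float32)
y = conv(x)                                # first forward initializes W
print(conv.W.shape)                        # (8, 5, 3, 3)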
Example #9
Source File: updater.py From yukarin with MIT License
def forward(self, input, target, mask):
    input = chainer.as_variable(input)
    target = chainer.as_variable(target)
    mask = chainer.as_variable(mask)

    output = self.predictor(input)

    output = output * mask
    target = target * mask

    d_fake = self.discriminator(input, output)
    d_real = self.discriminator(input, target)

    loss = {
        'predictor': _loss_predictor(self.predictor, output, target, d_fake, self.loss_config),
        'discriminator': _loss_discriminator(self.discriminator, d_real, d_fake),
    }
    return loss
Example #10
Source File: test_replace_func.py From chainer with MIT License
def get_model(self):
    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()
            with self.init_scope():
                self.l = L.Linear(None, 2)

        def half(self, xs, value=0.5):
            return xs * value

        def forward(self, xs):
            h = self.l(xs)
            h = self.half(h)
            return F.sum(chainer.as_variable(h))

    return Model()
Example #11
Source File: test_variable.py From chainer with MIT License
def test_to_variable_from_array(self, backend_config):
    x = backend_config.get_array(np.random.randn(1).astype(np.float32))
    y = chainer.as_variable(x)
    assert isinstance(y, chainer.Variable)
    assert y.requires_grad is False

    if backend_config.xp is chainerx:
        # chainerx
        assert y.array.shape == x.shape
        assert y.array.device == x.device
        assert y.array.strides == x.strides
        assert not y.array.is_backprop_required()
        chainerx.testing.assert_array_equal(y.array, x)
    else:
        # non-chainerx
        assert y.array is x
Example #12
Source File: bernoulli.py From chainer with MIT License
def p(self):
    if self.__p is not None:
        return chainer.as_variable(self.__p)
    else:
        return sigmoid.sigmoid(self.logit)
Example #13
Source File: bernoulli.py From chainer with MIT License
def logit(self):
    if self.__logit is not None:
        return chainer.as_variable(self.__logit)
    else:
        return exponential.log(self.p) - logarithm_1p.log1p(-self.p)
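Examples #12 and #13 are inverses of each other: logit = log p - log(1 - p) = log(p / (1 - p)) and p = sigmoid(logit), so whichever parameter the distribution was not constructed with is derived from the other. A quick numerical check (a sketch, plain numpy):

import numpy as np

p = 0.25
logit = np.log(p) - np.log1p(-p)       # log(p / (1 - p))
p_back = 1.0 / (1.0 + np.exp(-logit))  # sigmoid(logit)
print(np.isclose(p_back, p))           # True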
Example #14
Source File: laplace.py From chainer with MIT License
def loc(self):
    return chainer.as_variable(self.__loc)
Example #15
Source File: laplace.py From chainer with MIT License
def scale(self):
    return chainer.as_variable(self.__scale)
Example #16
Source File: updater.py From yukarin with MIT License
def forward(self, x, y, mask_x, mask_y):
    x = chainer.as_variable(x)
    y = chainer.as_variable(y)
    mask_x = chainer.as_variable(mask_x)
    mask_y = chainer.as_variable(mask_y)

    x_y = self.predictor_xy(x) * mask_x
    x_y_x = self.predictor_yx(x_y) * mask_x
    x_y_buffer = chainer.as_variable(self._get_and_update_buffer(x_y.data, self._buffer_x))

    y_x = self.predictor_yx(y) * mask_y
    y_x_y = self.predictor_xy(y_x) * mask_y
    y_x_buffer = chainer.as_variable(self._get_and_update_buffer(y_x.data, self._buffer_y))

    dx_real = self.discriminator_x(x)
    dx_fake = self.discriminator_x(y_x)
    dx_fake_buffer = self.discriminator_x(y_x_buffer)

    dy_real = self.discriminator_y(y)
    dy_fake = self.discriminator_y(x_y)
    dy_fake_buffer = self.discriminator_y(x_y_buffer)

    l_p_x = _loss_predictor_cg(self.predictor_yx, x_y_x, x_y, x, dx_fake, self.loss_config)
    l_p_y = _loss_predictor_cg(self.predictor_xy, y_x_y, y_x, y, dy_fake, self.loss_config)
    loss_predictor = l_p_x + l_p_y

    loss = {
        'predictor': loss_predictor,
        'discriminator_x': _loss_discriminator(self.discriminator_x, dx_real, dx_fake_buffer),
        'discriminator_y': _loss_discriminator(self.discriminator_y, dy_real, dy_fake_buffer),
    }
    return loss
Example #17
Source File: normal.py From chainer with MIT License
def loc(self):
    return chainer.as_variable(self.__loc)
Example #18
Source File: normal.py From chainer with MIT License
def scale(self):
    if self.__scale is not None:
        return chainer.as_variable(self.__scale)
    else:
        return exponential.exp(self.log_scale)
Example #19
Source File: pareto.py From chainer with MIT License
def log_prob(self, x):
    x = chainer.as_variable(x)
    logp = (
        self._log_alpha
        + self.alpha * self._log_scale
        - (self.alpha + 1) * exponential.log(x))
    xp = logp.xp
    return where.where(
        utils.force_array(x.data >= self.scale.data),
        logp,
        xp.array(-xp.inf, logp.dtype))
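This implements the Pareto log-density log p(x) = log(alpha) + alpha*log(scale) - (alpha + 1)*log(x) for x >= scale, and -inf outside the support. A quick cross-check against scipy (a sketch; it assumes scipy is available and uses scipy's shape parameter b = alpha):

import numpy as np
from scipy import stats

alpha, scale, x = 3.0, 2.0, 5.0
manual = np.log(alpha) + alpha * np.log(scale) - (alpha + 1) * np.log(x)
print(np.isclose(manual, stats.pareto.logpdf(x, b=alpha, scale=scale)))  # True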
Example #20
Source File: normal.py From chainer with MIT License
def log_scale(self):
    if self.__log_scale is not None:
        return chainer.as_variable(self.__log_scale)
    else:
        return exponential.log(self.scale)
Example #21
Source File: cast.py From chainer with MIT License
def cast(x, typ):
    """Cast an input variable to a given type.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`):
            Input variable to be casted. A
            :math:`(s_1, s_2, ..., s_N)`-shaped array.
        typ (:class:`str` of dtype or :class:`numpy.dtype`):
            Typecode or data type to cast.

    Returns:
        ~chainer.Variable: Variable holding a casted array.

    .. admonition:: Example

        >>> x = np.arange(0, 3, dtype=np.float64)
        >>> x.dtype
        dtype('float64')
        >>> y = F.cast(x, np.float32)
        >>> y.dtype
        dtype('float32')
        >>> y = F.cast(x, 'float16')
        >>> y.dtype
        dtype('float16')

    """
    if x.dtype == typ:
        if not chainer.config.enable_backprop:
            return chainer.as_variable(x)
    return Cast(typ).apply((x,))[0]
Example #22
Source File: broadcast.py From chainer with MIT License
def broadcast(*args):
    """Broadcast given variables.

    Args:
        args (:class:`~chainer.Variable` or :ref:`ndarray`):
            Input variables to be broadcasted. Each dimension of the shapes
            of the input variables must have the same size.

    Returns:
        ~chainer.Variable: :class:`~chainer.Variable` or tuple of
        :class:`~chainer.Variable` objects which are broadcasted
        from the given arguments.

    .. admonition:: Example

        >>> x = np.random.uniform(0, 1, (3, 2)).astype(np.float32)
        >>> y = F.broadcast(x)
        >>> np.all(x == y.array)
        True
        >>> z = np.random.uniform(0, 1, (3, 2)).astype(np.float32)
        >>> y, w = F.broadcast(x, z)
        >>> np.all(x == y.array) & np.all(z == w.array)
        True

    """
    if len(args) == 1:
        return chainer.as_variable(args[0])
    return Broadcast().apply(args)
Example #23
Source File: broadcast.py From chainer with MIT License
def broadcast_to(x, shape):
    """Broadcast a given variable to a given shape.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`):
            Input variable to be broadcasted. A
            :math:`(s_1, s_2, ..., s_N)`-shaped float array.
        shape (tuple): Tuple of :class:`int` of the shape of the
            output variable.

    Returns:
        ~chainer.Variable: Output variable broadcasted to the given shape.

    .. admonition:: Example

        >>> x = np.arange(0, 3)
        >>> x
        array([0, 1, 2])
        >>> y = F.broadcast_to(x, (3, 3))
        >>> y.array
        array([[0, 1, 2],
               [0, 1, 2],
               [0, 1, 2]])

    """
    if x.shape == shape:
        return chainer.as_variable(x)
    y, = BroadcastTo(shape).apply((x,))
    return y
Example #24
Source File: sum.py From chainer with MIT License
def sum_to(x, shape):
    """Sum elements along axes to output an array of a given shape.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
        shape (tuple of int): The target shape.

    Returns:
        ~chainer.Variable: Output variable of shape ``shape``.

    .. admonition:: Example

        >>> x = np.array([[1., 2., 3.], [4., 5., 6.]])
        >>> x
        array([[1., 2., 3.],
               [4., 5., 6.]])
        >>> y = F.sum_to(x, (1, 3))
        >>> y
        variable([[5., 7., 9.]])
        >>> z = F.sum_to(x, (2, 1))
        >>> z
        variable([[ 6.],
                  [15.]])

    """
    if x.shape == shape:
        return chainer.as_variable(x)
    y, = SumTo(shape).apply((x,))
    return y
Example #25
Source File: sign.py From chainer with MIT License
def sign(x):
    """Elementwise sign function.

    For a given input :math:`x`, this function returns :math:`sgn(x)`
    defined as

    .. math::

        sgn(x) = \\left \\{ \\begin{array}{cc}
        -1 & {\\rm if~x < 0} \\\\
        0 & {\\rm if~x = 0} \\\\
        1 & {\\rm if~x > 0} \\\\
        \\end{array} \\right.

    .. note::

        The gradient of this function is ``None`` everywhere and therefore
        unchains the computational graph.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable
            for which the sign is computed.

    Returns:
        ~chainer.Variable: Output variable.
    """
    if isinstance(x, chainer.variable.Variable):
        x = x.array
    xp = backend.get_array_module(x)
    return chainer.as_variable(utils.force_array(xp.sign(x)))
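A short usage sketch (assuming chainer.functions imported as F); as the docstring notes, the result carries no gradient:

import numpy as np
import chainer.functions as F

x = np.array([-2.5, 0.0, 3.0], dtype=np.float32)
y = F.sign(x)
print(y.array)  # [-1.  0.  1.]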
Example #26
Source File: export.py From onnx-chainer with MIT License
def __init__(self):
    self.link_inputs = set()
    self.retain_inputs = []
    self.replaced_inputs = []

    self.org_apply = chainer.function_node.FunctionNode.apply

    def hooked_apply(_self, inputs):
        ret = self.org_apply(_self, inputs)
        func_inodes = list(_self.inputs)
        for i, inode in enumerate(func_inodes):
            referenced_var = inode.get_variable_or_none()
            if referenced_var is None:
                # This variable is created within the function node and its
                # weakref is lost. Make a temporary variable and retain it.
                temp_var = chainer.as_variable(inputs[i])
                func_inodes[i] = temp_var.node
                self.retain_inputs.append(temp_var)
            else:
                if id(referenced_var) not in self.link_inputs:
                    # This variable is created within the link forward,
                    # outside of the function node. To avoid losing the
                    # reference outside of the forward, retain the variable.
                    self.retain_inputs.append(referenced_var)
        self.replaced_inputs.append((_self, _self.inputs))
        _self.inputs = tuple(func_inodes)
        return ret
    self.hooked_apply = hooked_apply
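The get_variable_or_none() call probes the weak reference this hook works around: a VariableNode only weakly references its owning Variable, so once the Python Variable object is garbage-collected the node answers None. A minimal illustration of that behavior (a sketch; it relies on CPython's immediate refcount-based collection):

import numpy as np
import chainer
import chainer.functions as F

x = chainer.Variable(np.zeros(3, dtype=np.float32))
y = F.relu(x)
node = y.creator.inputs[0]
print(node.get_variable_or_none() is x)  # True while x is alive

del x                                    # drop the only strong reference
print(node.get_variable_or_none())       # None: the weakref is dead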
Example #27
Source File: test_inout.py From onnx-chainer with MIT License
def test_hook_for_childlink(self, test_type):
    # TODO(disktnk): test_type='variable' fails

    class ChildModel(chainer.Chain):
        def forward(self, x, h):
            if test_type in ['variable', 'array']:
                h = [chainer.as_variable(h)]
            elif test_type == 'dict':
                h = list(h.values())
            h.append(x)
            return F.stack(h)

    class ParentModel(chainer.Chain):
        def __init__(self, get_x):
            super().__init__()
            self.get_x = get_x
            with self.init_scope():
                self.m = ChildModel()

        def forward(self, x):
            h = self.get_x(test_type)
            return self.m(x, h)

    model = ParentModel(self.get_x)
    x = self.get_x('variable')
    with RetainInputHook() as h:
        model(x)
    assert len(h.retain_inputs) == 1
Example #28
Source File: spatial_dropout.py From voxelnet_chainer with MIT License
def spatial_dropout(x, ratio=.1, **kwargs):
    """spatial_dropout(x, ratio=.1)"""
    argument.check_unexpected_kwargs(
        kwargs, train='train argument is not supported anymore. '
        'Use chainer.using_config')
    argument.assert_kwargs_empty(kwargs)

    if configuration.config.train:
        return SpatialDropout(ratio).apply((x,))[0]
    return chainer.as_variable(x)
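The final line is why as_variable appears here: with train=False the input passes through untouched but is still returned as a Variable. The flag is toggled through chainer's config context; a sketch of the same pattern using the built-in F.dropout (SpatialDropout itself is project-specific):

import numpy as np
import chainer
import chainer.functions as F

x = np.ones((1, 4), dtype=np.float32)
with chainer.using_config('train', False):
    y = F.dropout(x, ratio=0.5)  # identity at test time
print(y.array)                   # [[1. 1. 1. 1.]]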
Example #29
Source File: abstract_network.py From delira with GNU Affero General Public License v3.0
def prepare_batch(batch: dict, input_device, output_device):
    """
    Helper Function to prepare Network Inputs and Labels (convert them
    to correct type and shape and push them to correct devices)

    Parameters
    ----------
    batch : dict
        dictionary containing all the data
    input_device : chainer.backend.Device or string
        device for network inputs
    output_device : chainer.backend.Device or string
        device for network outputs

    Returns
    -------
    dict
        dictionary containing data in correct type and shape and
        on correct device
    """
    new_batch = {k: chainer.as_variable(v.astype(np.float32))
                 for k, v in batch.items()}

    for k, v in new_batch.items():
        if k == "data":
            device = input_device
        else:
            device = output_device

        # makes modification inplace!
        v.to_device(device)

    return new_batch
Example #30
Source File: model.py From graph-nvp with MIT License
def reverse(self, z, true_adj=None):
    """
    Returns a molecule, given its latent vector.

    :param z: latent vector. Shape: [B, N*N*M + N*T]
        B = Batch size, N = number of atoms, M = number of bond types,
        T = number of atom types (Carbon, Oxygen etc.)
    :param true_adj: used for testing. An adjacency matrix of a real molecule
    :return: adjacency matrix and feature matrix of a molecule
    """
    batch_size = z.shape[0]

    with chainer.no_backprop_mode():
        z_x = chainer.as_variable(z[:, :self.x_size])
        z_adj = chainer.as_variable(z[:, self.x_size:])

        if true_adj is None:
            h_adj = F.reshape(z_adj, (batch_size, self.num_relations,
                                      self.num_nodes, self.num_nodes))

            # First, the adjacency coupling layers are applied in reverse
            # order to get h_adj
            for i in reversed(range(self.hyperparams.num_coupling['channel'],
                                    len(self.clinks))):
                h_adj, log_det_jacobians = self.clinks[i].reverse(h_adj)

            # make adjacency matrix from h_adj
            adj = h_adj
            adj += self.xp.transpose(adj, (0, 1, 3, 2))
            adj = adj / 2
            adj = F.softmax(adj, axis=1)
            max_bond = F.repeat(F.max(adj, axis=1).reshape(
                batch_size, -1, self.num_nodes, self.num_nodes),
                self.num_relations, axis=1)
            adj = F.floor(adj / max_bond)
        else:
            adj = true_adj

        h_x = F.reshape(z_x, (batch_size, self.num_nodes, self.num_features))

        # channel coupling layers
        for i in reversed(range(self.hyperparams.num_coupling['channel'])):
            h_x, log_det_jacobians = self.clinks[i].reverse(h_x, adj)

    return adj, h_x
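The F.floor(adj / max_bond) step is a vectorized argmax-to-one-hot conversion: after the softmax over bond types, only the largest entry equals the broadcast per-pair maximum, so the division gives exactly 1.0 there and values strictly below 1 elsewhere, which floor maps to 0. A minimal numpy illustration for a single node pair with three bond types (a sketch):

import numpy as np

probs = np.array([0.2, 0.5, 0.3])        # softmax output over bond types
one_hot = np.floor(probs / probs.max())  # 1.0 only where probs == max
print(one_hot)                           # [0. 1. 0.]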