Python mxnet.ndarray.mean() Examples
The following are 7 code examples of mxnet.ndarray.mean().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module mxnet.ndarray, or try the search function.
Example #1
Source File: tensor.py From dgl with Apache License 2.0 | 5 votes |
def mean(input, dim):
    """Return the mean of ``input`` reduced along axis ``dim``.

    Thin adapter that maps DGL's ``dim`` argument onto MXNet's
    ``axis`` keyword.
    """
    return nd.mean(input, axis=dim)
Example #2
Source File: tensor.py From dgl with Apache License 2.0 | 5 votes |
def reduce_mean(input):
    """Collapse ``input`` to a scalar: the mean over all elements."""
    reduced = input.mean()
    return reduced
Example #3
Source File: tensor.py From dgl with Apache License 2.0 | 5 votes |
def unsorted_1d_segment_mean(input, seg_id, n_segs, dim):
    """Per-segment mean of ``input`` rows grouped by ``seg_id``.

    For each of ``n_segs`` segments, averages the slices of ``input``
    whose segment id equals that segment index. Implemented as a
    segment sum divided by per-segment counts.
    """
    # TODO: support other dimensions
    assert dim == 0, 'MXNet only supports segment mean on first dimension'
    # Count how many entries land in each segment; clip to >= 1 so
    # empty segments divide by one rather than zero.
    ones = nd.ones_like(seg_id).astype(input.dtype)
    counts = unsorted_1d_segment_sum(ones, seg_id, n_segs, 0)
    counts = nd.clip(counts, a_min=1, a_max=np.inf)
    # Sum per segment, then normalize; counts are reshaped so they
    # broadcast across all trailing feature dimensions.
    totals = unsorted_1d_segment_sum(input, seg_id, n_segs, dim)
    broadcast_shape = (-1,) + (1,) * (totals.ndim - 1)
    return totals / counts.reshape(broadcast_shape)
Example #4
Source File: tensor.py From dgl with Apache License 2.0 | 5 votes |
def backward(self, grad_out):
    """Backward pass for the fused binary-op-and-reduce forward.

    Takes the upstream gradient ``grad_out`` and returns the gradients
    for the lhs and rhs operands that were saved during forward.
    """
    # Tensors stashed by the forward pass; degs holds per-output
    # degrees when the reducer is 'mean' (presumably None otherwise —
    # TODO confirm against the forward implementation).
    lhs_data_nd, rhs_data_nd, out_data_nd, feat_shape, degs = self.saved_tensors
    if self.reducer == 'mean':
        # Mean was computed as sum / degree, so pre-scale the incoming
        # gradient by the degrees and run the sum-backward kernels.
        grad_out = grad_out / degs
    grad_out_nd = zerocopy_to_dgl_ndarray(grad_out)
    # Gradient w.r.t. the left-hand operand.
    grad_lhs = nd.empty((lhs_data_nd.shape[0],) + feat_shape,
                        ctx=grad_out.context, dtype=grad_out.dtype)
    K.backward_lhs_binary_op_reduce(
        # 'mean' is lowered to 'sum' because the degree scaling above
        # already accounts for the averaging.
        self.reducer if self.reducer != 'mean' else 'sum',
        self.binary_op, self.graph, self.lhs, self.rhs,
        lhs_data_nd, rhs_data_nd, out_data_nd, grad_out_nd,
        zerocopy_to_dgl_ndarray_for_write(grad_lhs),
        self.lhs_map[1], self.rhs_map[1], self.out_map[1])
    # Undo any broadcasting so the gradient matches the input's shape.
    grad_lhs = _reduce_grad(grad_lhs, lhs_data_nd.shape)
    # Gradient w.r.t. the right-hand operand (mirror of the above).
    grad_rhs = nd.empty((rhs_data_nd.shape[0],) + feat_shape,
                        ctx=grad_out.context, dtype=grad_out.dtype)
    K.backward_rhs_binary_op_reduce(
        self.reducer if self.reducer != 'mean' else 'sum',
        self.binary_op, self.graph, self.lhs, self.rhs,
        lhs_data_nd, rhs_data_nd, out_data_nd, grad_out_nd,
        zerocopy_to_dgl_ndarray_for_write(grad_rhs),
        self.lhs_map[1], self.rhs_map[1], self.out_map[1])
    grad_rhs = _reduce_grad(grad_rhs, rhs_data_nd.shape)
    # clear saved tensors explicitly
    self.saved_tensors = None
    return grad_lhs, grad_rhs
Example #5
Source File: tensor.py From dgl with Apache License 2.0 | 5 votes |
def backward(self, grad_out):
    """Backward pass for the copy-and-reduce forward.

    Maps the upstream gradient ``grad_out`` back onto the input that
    was copied/reduced during forward.
    """
    # Tensors stashed by the forward pass; degs holds per-output
    # degrees used only when the reducer is 'mean'.
    in_data_nd, out_data_nd, degs = self.saved_tensors
    grad_in = nd.empty(in_data_nd.shape, ctx=grad_out.context,
                       dtype=grad_out.dtype)
    if self.reducer == 'mean':
        # Mean was computed as sum / degree, so pre-scale the incoming
        # gradient and let the sum-backward kernel do the rest.
        grad_out = grad_out / degs
    grad_out_nd = zerocopy_to_dgl_ndarray(grad_out)
    K.backward_copy_reduce(
        # 'mean' is lowered to 'sum'; the degree scaling above already
        # accounts for the averaging.
        self.reducer if self.reducer != 'mean' else 'sum',
        self.graph, self.target, in_data_nd, out_data_nd,
        grad_out_nd, zerocopy_to_dgl_ndarray_for_write(grad_in),
        self.in_map[1], self.out_map[1])
    # clear saved tensors explicitly
    self.saved_tensors = None
    return grad_in
Example #6
Source File: custom_layers.py From d-SNE with Apache License 2.0 | 5 votes |
def hybrid_forward(self, F, input_logits, target_logits, sample_weight=None):
    """Mean squared distance between two softmax distributions.

    Both logit tensors are turned into probabilities along axis 1,
    and the squared difference is averaged over every axis except
    the batch axis.
    """
    probs_in = F.softmax(input_logits, axis=1)
    probs_target = F.softmax(target_logits, axis=1)
    squared_diff = F.square(probs_in - probs_target)
    # exclude=True: reduce over all axes other than the batch axis.
    return F.mean(squared_diff, axis=self._batch_axis, exclude=True)
Example #7
Source File: tensor.py From dgl with Apache License 2.0 | 4 votes |
def forward(self, lhs_data, rhs_data):
    """Forward pass: apply a binary op to lhs/rhs data and reduce.

    Returns the reduced output tensor; also saves the tensors needed
    by backward() via ``save_for_backward``.
    """
    lhs_data_nd = zerocopy_to_dgl_ndarray(lhs_data)
    rhs_data_nd = zerocopy_to_dgl_ndarray(rhs_data)
    # Feature shape of the binary op's result, inferred by the kernel.
    feat_shape = K.infer_binary_feature_shape(self.binary_op, lhs_data_nd,
                                              rhs_data_nd)
    out_shape = feat_shape
    if self.binary_op == 'dot':
        # A dot product contracts the last feature dimension.
        out_shape = feat_shape[:-1]
    out_data = nd.empty((self.out_size,) + out_shape,
                        ctx=lhs_data.context, dtype=lhs_data.dtype)
    out_data_nd = zerocopy_to_dgl_ndarray_for_write(out_data)
    K.binary_op_reduce(
        # 'mean' is run as 'sum' here; the normalization happens below.
        self.reducer if self.reducer != 'mean' else 'sum',
        self.binary_op, self.graph, self.lhs, self.rhs,
        lhs_data_nd, rhs_data_nd, out_data_nd, self.lhs_map[0],
        self.rhs_map[0], self.out_map[0])
    # normalize if mean reducer
    # NOTE(zihao): this is a temporary hack and we should have better solution in the future.
    if self.reducer == 'mean':
        # Compute per-output degrees by copy-reducing a vector of ones
        # from whichever side is not the destination.
        degs = nd.empty((out_data.shape[0],),
                        ctx=out_data.context, dtype=out_data.dtype)
        degs_nd = zerocopy_to_dgl_ndarray(degs)
        if self.lhs != TargetCode.DST:
            target = self.lhs
            n = lhs_data.shape[0]
            in_map = self.lhs_map[0]
        else:
            target = self.rhs
            n = rhs_data.shape[0]
            in_map = self.rhs_map[0]
        in_ones = nd.ones((n,), ctx=lhs_data.context, dtype=lhs_data.dtype)
        in_ones_nd = zerocopy_to_dgl_ndarray(in_ones)
        K.copy_reduce(
            'sum', self.graph, target, in_ones_nd, degs_nd,
            in_map, self.out_map[0])
        # reshape
        # Broadcast degrees over feature dims; clip to >= 1 so outputs
        # with no contributions are divided by one, not zero.
        degs = degs.reshape((out_data.shape[0],) +
                            (1,) * (out_data.ndim - 1)).clip(1, float('inf'))
        out_data = out_data / degs
    else:
        degs = None
    self.save_for_backward(lhs_data_nd, rhs_data_nd, out_data_nd,
                           feat_shape, degs)
    return out_data