Python torch.functional() Examples
The following are 2 code examples of torch.functional(). You can go to the original project or source file by following the links above each example, or check out all available functions/classes of the torch module.
Example #1
Source File: source_gen.py, from pytorch-mcn (MIT License)
def build_header_str(net_name, rgb_mean, rgb_std, im_size,
                     uses_functional, debug_mode):
    """Generate source code header - constructs the header source code
    for the network definition file.

    Args:
        net_name (str): name of the network architecture
        debug_mode (bool): whether to generate additional debugging code
        rgb_mean (List): average rgb image of training data
        rgb_std (List): standard deviation of rgb images in training data
        im_size (List): spatial dimensions of the training input image size
        uses_functional (bool): whether the network requires the
            torch.functional module

    Returns:
        (str): source code header string.
    """
    imports = '''
import torch
import torch.nn as nn
'''
    if uses_functional:
        imports = imports + '''
import torch.nn.functional as F
'''
    header = imports + '''

class {0}(nn.Module):

    def __init__(self):
        super({0}, self).__init__()
        self.meta = {{'mean': {1}, 'std': {2}, 'imageSize': {3}}}
'''
    if debug_mode:
        header = header + '''
        from collections import OrderedDict
        self.debug_feats = OrderedDict() # only used for feature verification
'''
    return header.format(net_name, rgb_mean, rgb_std, im_size)
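For context, here is a minimal usage sketch of the helper above. The argument values (the network name, RGB statistics, and image size) are made up for illustration and are not taken from pytorch-mcn.

    # Illustrative call only; the values below are hypothetical.
    header = build_header_str(
        net_name='SqueezeNet',           # hypothetical architecture name
        rgb_mean=[122.7, 114.9, 101.6],  # illustrative training-set statistics
        rgb_std=[1.0, 1.0, 1.0],
        im_size=[224, 224],
        uses_functional=True,            # adds "import torch.nn.functional as F"
        debug_mode=False,
    )
    print(header)  # prints the imports plus the generated class skeleton

The returned string is only the header; the rest of the generated module body would be appended to it before writing the network definition file.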
Example #2
Source File: supported_layers_grad_samplers.py, from pytorch-dp (Apache License 2.0)
def _compute_conv_grad_sample(layer, A, B, batch_dim=0):
    n = A.shape[0]
    layer_type = get_layer_type(layer)
    # get A and B in shape depending on the Conv layer
    if layer_type == "Conv2d":
        A = torch.nn.functional.unfold(
            A, layer.kernel_size, padding=layer.padding, stride=layer.stride
        )
        B = B.reshape(n, -1, A.shape[-1])
    elif layer_type == "Conv1d":
        # unfold doesn't work for 3D tensors; so force it to be 4D
        A = A.unsqueeze(-2)  # add the H dimension
        # set arguments to tuples with appropriate second element
        A = torch.nn.functional.unfold(
            A,
            (1, layer.kernel_size[0]),
            padding=(0, layer.padding[0]),
            stride=(1, layer.stride[0]),
        )
        B = B.reshape(n, -1, A.shape[-1])
    try:
        # n=batch_sz; o=num_out_channels; p=num_in_channels*kernel_sz
        grad_sample = (
            torch.einsum("noq,npq->nop", B, A)
            if layer.groups == 1
            else torch.einsum("njk,njk->nj", B, A)
        )
        shape = [n] + list(layer.weight.shape)
        _create_or_extend_grad_sample(
            layer.weight, grad_sample.reshape(shape), batch_dim
        )
    except Exception as e:
        raise type(e)(
            f"{e} There is probably a problem with {layer_type}.groups"
            + "It should be either 1 or in_channel"
        )

    if layer.bias is not None:
        _create_or_extend_grad_sample(layer.bias, torch.sum(B, dim=2), batch_dim)
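As a sanity check on the unfold-plus-einsum idea used above, here is a minimal, self-contained sketch (not part of pytorch-dp; the layer, input, and upstream gradient below are made up). Summing the per-sample weight gradients over the batch should reproduce the aggregate gradient that autograd stores in layer.weight.grad.

    import torch

    torch.manual_seed(0)
    conv = torch.nn.Conv2d(3, 8, kernel_size=3, padding=1)
    x = torch.randn(2, 3, 8, 8)        # illustrative mini-batch of 2 images
    out = conv(x)
    B = torch.randn_like(out)          # pretend upstream gradient
    out.backward(B)                    # fills conv.weight.grad with the batch-summed gradient

    n = x.shape[0]
    # unfold the input so each column holds one receptive field
    A = torch.nn.functional.unfold(
        x, conv.kernel_size, padding=conv.padding, stride=conv.stride
    )
    B_flat = B.reshape(n, -1, A.shape[-1])
    # per-sample gradients: one [out_ch, in_ch * k * k] block per example
    grad_sample = torch.einsum("noq,npq->nop", B_flat, A)
    grad_sample = grad_sample.reshape([n] + list(conv.weight.shape))

    # the per-sample gradients sum to the aggregate gradient (up to float32 noise)
    print(torch.allclose(grad_sample.sum(dim=0), conv.weight.grad, atol=1e-4))  # True

The point of _compute_conv_grad_sample is exactly this decomposition: unfold turns the convolution into a batched matrix product, so the einsum yields one gradient per example instead of only the batch-summed gradient, which is what per-sample gradient clipping in differential privacy needs.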