Python utils.log_sum_exp() Examples

The following are 4 code examples of utils.log_sum_exp(), taken from open-source projects. The original project and source file are noted above each example. You may also want to check out all available functions/classes of the utils module, or try the search function.
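
None of the projects' utils modules are reproduced here, but a log_sum_exp helper of this kind typically computes log(sum(exp(value))) along a given dimension in a numerically stable way, by subtracting the maximum before exponentiating. Below is a minimal sketch, assuming the sum_op keyword seen in the variational examples (the exact signature and defaults in each project's utils module may differ; torchkit's calls pass the dimension positionally):

import torch

def log_sum_exp(value, dim=0, sum_op=torch.sum):
    # Numerically stable log(sum_op(exp(value))) along `dim`: shifting by
    # the per-slice maximum keeps exp() from overflowing.
    m, _ = torch.max(value, dim=dim, keepdim=True)
    return m + torch.log(sum_op(torch.exp(value - m), dim=dim, keepdim=True))

With sum_op=torch.mean this computes a log-mean-exp rather than a log-sum-exp, which is how the importance-weighted bounds in Examples #1 and #4 use it.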
Example #1
Source File: variational.py    From Deep-SAD-PyTorch with MIT License
def __call__(self, elbo):
    # Reshape the flat ELBO estimates into (mc, iw, batch).
    elbo = elbo.view(self.mc, self.iw, -1)
    # Log-mean-exp over the iw importance samples, then average over
    # the mc Monte Carlo samples.
    elbo = torch.mean(log_sum_exp(elbo, dim=1, sum_op=torch.mean), dim=0)
    return elbo.view(-1)
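
Here self.mc and self.iw are the numbers of Monte Carlo and importance-weighted samples per data point. The inner call takes a log-mean-exp over the iw importance samples (an IWAE-style bound), and the outer torch.mean averages over the mc Monte Carlo samples. A quick shape check, using the log_sum_exp sketch above and hypothetical sizes:

import torch

mc, iw, batch = 2, 5, 8
elbo = torch.randn(mc * iw * batch)                  # one ELBO term per sample
elbo = elbo.view(mc, iw, -1)                         # (mc, iw, batch)
inner = log_sum_exp(elbo, dim=1, sum_op=torch.mean)  # (mc, 1, batch)
bound = torch.mean(inner, dim=0).view(-1)            # (batch,)
print(bound.shape)                                   # torch.Size([8])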
Example #2
Source File: flows.py    From torchkit with MIT License
def forward(self, x, logdet, dsparams, mollify=0.0, delta=nn_.delta):
    ndim = self.num_ds_dim
    # Slice the conditioner output into per-sigmoid parameters.
    a_ = self.act_a(dsparams[:, :, 0 * ndim:1 * ndim])
    b_ = self.act_b(dsparams[:, :, 1 * ndim:2 * ndim])
    w = self.act_w(dsparams[:, :, 2 * ndim:3 * ndim])

    # Mollification interpolates the parameters towards those of an
    # identity transform (a = 1, b = 0).
    a = a_ * (1 - mollify) + 1.0 * mollify
    b = b_ * (1 - mollify) + 0.0 * mollify

    # Mixture of sigmoids, clipped away from {0, 1}, then logit-transformed.
    pre_sigm = a * x[:, :, None] + b
    sigm = torch.sigmoid(pre_sigm)
    x_pre = torch.sum(w * sigm, dim=2)
    x_pre_clipped = x_pre * (1 - delta) + delta * 0.5
    xnew = log(x_pre_clipped) - log(1 - x_pre_clipped)

    # Per-component log-derivative, combined across components in log space.
    logj = F.log_softmax(dsparams[:, :, 2 * ndim:3 * ndim], dim=2) + \
        nn_.logsigmoid(pre_sigm) + \
        nn_.logsigmoid(-pre_sigm) + log(a)
    logj = utils.log_sum_exp(logj, 2).sum(2)

    # Add the log-derivative of the clipping and logit steps.
    logdet_ = logj + np.log(1 - delta) - \
        (log(x_pre_clipped) + log(-x_pre_clipped + 1))
    logdet = logdet_.sum(1) + logdet

    return xnew, logdet
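
The log_sum_exp call above evaluates the derivative of the mixture of sigmoids without leaving log space: each component of logj holds log softmax(w)_k + log sigmoid(pre_k) + log(1 - sigmoid(pre_k)) + log a_k, i.e. the log of one term of the summed derivative, and log-sum-exp over the component dimension gives the log of the total. A standalone illustration of the same pattern using PyTorch's built-in torch.logsumexp (the names here are illustrative, not from torchkit):

import torch
import torch.nn.functional as F

# Stable log of d/dx sum_k softmax(w)_k * sigmoid(a_k * x + b_k).
x = torch.randn(4)                       # a batch of scalars
a = torch.rand(3) + 0.5                  # positive slopes
b, w = torch.randn(3), torch.randn(3)
pre = a * x[:, None] + b                 # (4, 3)
log_terms = (F.log_softmax(w, dim=0)     # log mixture weights
             + F.logsigmoid(pre)         # log sigmoid(pre)
             + F.logsigmoid(-pre)        # log (1 - sigmoid(pre))
             + torch.log(a))             # chain-rule factor
log_deriv = torch.logsumexp(log_terms, dim=1)  # (4,)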
Example #3
Source File: flows.py    From torchkit with MIT License
def forward(self, x, logdet, dsparams):
    # inv = softplus^{-1}(1 - delta), added to the pre-activation of a.
    inv = np.log(np.exp(1 - nn_.delta) - 1)
    ndim = self.hidden_dim
    pre_u = self.u_[None, None, :, :] + dsparams[:, :, -self.in_dim:][:, :, None, :]
    pre_w = self.w_[None, None, :, :] + dsparams[:, :, 2 * ndim:3 * ndim][:, :, None, :]
    a = self.act_a(dsparams[:, :, 0 * ndim:1 * ndim] + inv)
    b = self.act_b(dsparams[:, :, 1 * ndim:2 * ndim])
    w = self.act_w(pre_w)
    u = self.act_u(pre_u)

    # Dense sigmoidal layer: input weights u, slopes a, output weights w.
    pre_sigm = torch.sum(u * a[:, :, :, None] * x[:, :, None, :], 3) + b
    sigm = torch.sigmoid(pre_sigm)
    x_pre = torch.sum(w * sigm[:, :, None, :], dim=3)
    x_pre_clipped = x_pre * (1 - nn_.delta) + nn_.delta * 0.5
    xnew = log(x_pre_clipped) - log(1 - x_pre_clipped)

    # Log-Jacobian of the sigmoidal layer.
    logj = F.log_softmax(pre_w, dim=3) + \
        nn_.logsigmoid(pre_sigm[:, :, None, :]) + \
        nn_.logsigmoid(-pre_sigm[:, :, None, :]) + log(a[:, :, None, :])
    # n, d, d2, dh

    logj = logj[:, :, :, :, None] + F.log_softmax(pre_u, dim=3)[:, :, None, :, :]
    # n, d, d2, dh, d1

    logj = utils.log_sum_exp(logj, 3).sum(3)
    # n, d, d2, d1

    # Log-derivative of the clipping and logit steps.
    logdet_ = logj + np.log(1 - nn_.delta) - \
        (log(x_pre_clipped) + log(-x_pre_clipped + 1))[:, :, :, None]

    # Compose with the accumulated log-Jacobian: a matrix product in log space.
    logdet = utils.log_sum_exp(
        logdet_[:, :, :, :, None] + logdet[:, :, None, :, :], 3).sum(3)
    # n, d, d2, d1, d0 -> n, d, d2, d0

    return xnew, logdet
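
The final log_sum_exp composes this layer's log-Jacobian with the logdet accumulated from earlier layers. Both factors are stored as logs of non-negative entries, so the chain rule's matrix product can be carried out entirely in log space: each output entry is a log-sum-exp over the shared dimension. A minimal sketch of that operation, using torch.logsumexp (log_matmul is a hypothetical name, not part of torchkit):

import torch

def log_matmul(log_A, log_B):
    # log of (A @ B) for non-negative A, B given their elementwise logs.
    # (A @ B)[i, j] = sum_k A[i, k] * B[k, j], so each output entry is a
    # log-sum-exp over the shared dimension k.
    return torch.logsumexp(log_A.unsqueeze(-1) + log_B.unsqueeze(-3), dim=-2)

The expression logdet_[:,:,:,:,None] + logdet[:,:,None,:,:] followed by log_sum_exp over dimension 3 is exactly this broadcast-and-reduce, batched over the leading n and d dimensions.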
Example #4
Source File: variational.py    From semi-supervised-pytorch with MIT License
def __call__(self, elbo):
    # Reshape into (mc, iw, batch), take a log-mean-exp over the
    # importance samples, then average over the Monte Carlo samples.
    elbo = elbo.view(self.mc, self.iw, -1)
    elbo = torch.mean(log_sum_exp(elbo, dim=1, sum_op=torch.mean), dim=0)
    return elbo.view(-1)