Python torch.any() Examples

The following are 30 code examples of torch.any(), drawn from open-source projects. The source file, project, and license are listed above each example. You may also want to check out all available functions and classes of the torch module.
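Before the project examples, here is a minimal, self-contained illustration of what torch.any() does (the tensors below are made up for demonstration):

import torch

x = torch.tensor([[False, True, False],
                  [False, False, False]])

# With no arguments, torch.any() reduces over every element and returns a 0-dim tensor.
print(torch.any(x))         # tensor(True)

# With a dim argument, it reduces along that dimension only.
print(torch.any(x, dim=1))  # tensor([ True, False])

# A recurring pattern in the examples below: reduce a comparison result to a single flag.
std = torch.tensor([1.0, 0.0, 2.0])
print(torch.any(std == 0))  # tensor(True)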
Example #1
Source File: preprocessing.py    From torchlayers with MIT License
def __init__(self, mean: torch.Tensor, std: torch.Tensor, inplace: bool = False):
        tensor_mean = Normalize._transform_to_tensor(mean, "mean")
        tensor_std = Normalize._transform_to_tensor(std, "std")
        Normalize._check_shape(tensor_mean, "mean")
        Normalize._check_shape(tensor_std, "std")

        if torch.any(tensor_std == 0):
            raise ValueError(
                "One or more std values are zero which would lead to division by zero."
            )

        super().__init__()

        self.register_buffer("mean", tensor_mean)
        self.register_buffer("std", tensor_std)
        self.inplace: bool = inplace 
Example #2
Source File: test_assigner.py    From mmdetection with Apache License 2.0
def test_center_region_assigner():
    self = CenterRegionAssigner(pos_scale=0.3, neg_scale=1)
    bboxes = torch.FloatTensor([[0, 0, 10, 10], [10, 10, 20, 20], [8, 8, 9, 9]])
    gt_bboxes = torch.FloatTensor([
        [0, 0, 11, 11],  # match bboxes[0]
        [10, 10, 20, 20],  # match bboxes[1]
        [4.5, 4.5, 5.5, 5.5],  # match bboxes[0] but area is too small
        [0, 0, 10, 10],  # match bboxes[1] and has a smaller area than gt[0]
    ])
    gt_labels = torch.LongTensor([2, 3, 4, 5])
    assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels)
    assert len(assign_result.gt_inds) == 3
    assert len(assign_result.labels) == 3
    expected_gt_inds = torch.LongTensor([4, 2, 0])
    assert torch.all(assign_result.gt_inds == expected_gt_inds)
    shadowed_labels = assign_result.get_extra_property('shadowed_labels')
    # [8, 8, 9, 9] in the shadowed region of [0, 0, 11, 11] (label: 2)
    assert torch.any(shadowed_labels == torch.LongTensor([[2, 2]]))
    # [8, 8, 9, 9] in the shadowed region of [0, 0, 10, 10] (label: 5)
    assert torch.any(shadowed_labels == torch.LongTensor([[2, 5]]))
    # [0, 0, 10, 10] is already assigned to [4.5, 4.5, 5.5, 5.5].
    #   Therefore, [0, 0, 11, 11] (label: 2) is shadowed
    assert torch.any(shadowed_labels == torch.LongTensor([[0, 2]])) 
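Note that torch.any() in the assertions above reduces over every element of the broadcast comparison. The sketch below contrasts that elementwise check with an exact row match; the tensor values are hypothetical, chosen to mirror the comments in the test:

import torch

shadowed = torch.LongTensor([[2, 2], [2, 5], [0, 2]])  # hypothetical shadowed_labels
target = torch.LongTensor([[2, 5]])

# Elementwise: True if any single element of the broadcast comparison matches.
loose = torch.any(shadowed == target)
# Row-wise: True only if some row equals the target pair exactly.
strict = torch.any(torch.all(shadowed == target, dim=1))
print(loose.item(), strict.item())  # True True for this data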
Example #3
Source File: iou_loss.py    From GCNet with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
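The early return in this forward pass (repeated in the variants below) is worth a closer look. Returning (pred * weight).sum() rather than a literal 0 keeps the zero-valued loss connected to the autograd graph, which is one plausible reading of the # 0 comment. A small sketch under that assumption, with made-up tensors:

import torch

pred = torch.randn(4, 4, requires_grad=True)
weight = torch.zeros(4, 4)

if weight is not None and not torch.any(weight > 0):
    loss = (pred * weight).sum()  # value is 0, but backward() still works

loss.backward()
print(loss.item())            # 0.0
print(pred.grad.abs().sum())  # tensor(0.)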
Example #4
Source File: iou_loss.py    From mmdetection-annotated with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #5
Source File: weighted.py    From torchio with MIT License
def __call__(
            self,
            sample: Subject,
            num_patches: Optional[int] = None,
            ) -> Generator[Subject, None, None]:
        sample.check_consistent_shape()
        if np.any(self.patch_size > sample.spatial_shape):
            message = (
                f'Patch size {tuple(self.patch_size)} cannot be'
                f' larger than image size {tuple(sample.spatial_shape)}'
            )
            raise RuntimeError(message)
        probability_map = self.get_probability_map(sample)
        probability_map = self.process_probability_map(probability_map)
        cdf, sort_indices = self.get_cumulative_distribution_function(
            probability_map)

        patches_left = num_patches if num_patches is not None else True
        while patches_left:
            yield self.extract_patch(sample, probability_map, cdf, sort_indices)
            if num_patches is not None:
                patches_left -= 1 
Example #6
Source File: iou_loss.py    From mmdetection-annotated with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #7
Source File: iou_loss.py    From PolarMask with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #8
Source File: function_test.py    From Pytorch_Quantize_impls with MIT License
def test_terner_connect_sto_forward():
    x = torch.Tensor([1, 0, 0.45, -1, -0.9]).view(1, -1)

    results = list()
    for i in range(1000):
        temp_result = TernaryConnectStochastic.apply(x)
        # Tensor must have only -1, 0, 1 values
        assert not torch.any(torch.lt(torch.abs(temp_result - 1), 1e-8) * torch.lt(torch.abs(temp_result), 1e-8))
        results.append(temp_result)

    result = torch.cat(results, 0)
    result = torch.sum(result, 0) / 1000

    assert equals(
        result,
        torch.Tensor([1, 0, 0.45, -1, -0.9]).view(1, -1),
        5e-2)
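The test above multiplies two comparison masks together as a logical AND before passing the result to torch.any(). With boolean tensors in current PyTorch, the same combination can be written with the & operator; a short sketch with made-up values:

import torch

t = torch.tensor([1.0, 0.0, -1.0, 0.45])

near_one = torch.abs(t - 1) < 1e-8   # close to 1
near_zero = torch.abs(t) < 1e-8      # close to 0

# No element can be close to both 1 and 0 at the same time.
assert not torch.any(near_one & near_zero)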
Example #9
Source File: multitask_classifier.py    From snorkel with Apache License 2.0
def _get_labels_to_tasks(
        self, label_names: Iterable[str], remap_labels: Dict[str, Optional[str]] = {}
    ) -> Dict[str, str]:
        """Map each label to its corresponding task outputs based on whether the task is available.

        If remap_labels is specified, it overrides specific label -> task mappings.
        If a label is mapped to `None`, that key is removed from the mapping.
        """
        labels_to_tasks = {}
        for label in label_names:
            # Override any existing label -> task mappings
            if label in remap_labels:
                task = remap_labels.get(label)
                # Note: task might be manually remapped to None to remove it from the labels_to_tasks
                if task is not None:
                    labels_to_tasks[label] = task

            # If available in task flows, label should map to task of same name
            elif label in self.op_sequences:
                labels_to_tasks[label] = label

        return labels_to_tasks 
Example #10
Source File: iou_loss.py    From kaggle-kuzushiji-recognition with MIT License
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #11
Source File: iou_loss.py    From kaggle-kuzushiji-recognition with MIT License
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #12
Source File: iou_loss.py    From RDSNet with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #13
Source File: iou_loss.py    From RDSNet with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #14
Source File: iou_loss.py    From IoU-Uniform-R-CNN with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #15
Source File: iou_loss.py    From IoU-Uniform-R-CNN with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #16
Source File: iou_loss.py    From mmdetection with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #17
Source File: cluster.py    From vamb with MIT License
def __next__(self):
        # Stop criterion. For CUDA, inplace masking the array is too slow, so the matrix is
        # unchanged. On CPU, we continually modify the matrix by removing rows.
        if self.CUDA:
            if not _torch.any(self.kept_mask).item():
                raise StopIteration
        elif len(self.matrix) == 0:
            raise StopIteration

        cluster, medoid, points = self._findcluster()
        self.nclusters += 1

        for point in points:
            self.kept_mask[point] = 0

        # Remove all points that have been clustered away. This is slow in itself, but speeds up
        # distance calculation by leaving fewer points. Worth it on CPU, not on GPU
        if not self.CUDA:
            _vambtools.torch_inplace_maskarray(self.matrix, self.kept_mask)
            self.indices = self.indices[self.kept_mask] # no need to inplace mask small array
            self.kept_mask.resize_(len(self.matrix))
            self.kept_mask[:] = 1

        return cluster 
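The stop criterion above relies on torch.any() returning a 0-dim tensor, which .item() converts to a Python bool. A minimal sketch of that pattern, using a hypothetical mask:

import torch

kept_mask = torch.zeros(5, dtype=torch.bool)  # hypothetical mask: nothing kept

if not torch.any(kept_mask).item():
    print("no points left to cluster")  # reached when every entry is False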
Example #18
Source File: cluster.py    From vamb with MIT License
def _normalize(matrix, inplace=False):
    """Preprocess the matrix to make distance calculations faster.
    The distance functions in this module assume the input has been normalized
    and will not work otherwise.
    """
    if isinstance(matrix, _np.ndarray):
        matrix = _torch.from_numpy(matrix)

    if not inplace:
        matrix = matrix.clone()

    # If any rows are all zeros, the distance function will return 0.5 to all points,
    # including the row itself, which can break the code in this module
    zeromask = matrix.sum(dim=1) == 0
    matrix[zeromask] = 1/matrix.shape[1]
    matrix /= (matrix.norm(dim=1).reshape(-1, 1) * (2 ** 0.5))
    return matrix 
Example #19
Source File: iou_loss.py    From Libra_R-CNN with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #20
Source File: iou_loss.py    From Libra_R-CNN with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #21
Source File: iou_loss.py    From FoveaBox with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #22
Source File: iou_loss.py    From FoveaBox with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #23
Source File: iou_loss.py    From Cascade-RPN with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        if weight is not None:  # iou loss is single unit
            weight = weight[:, 0]
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            linear=self.linear,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #24
Source File: iou_loss.py    From Cascade-RPN with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #25
Source File: iou_loss.py    From Feature-Selective-Anchor-Free-Module-for-Single-Shot-Object-Detection with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #26
Source File: iou_loss.py    From Feature-Selective-Anchor-Free-Module-for-Single-Shot-Object-Detection with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #27
Source File: iou_loss.py    From CenterNet with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #28
Source File: iou_loss.py    From CenterNet with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * bounded_iou_loss(
            pred,
            target,
            weight,
            beta=self.beta,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #29
Source File: linear_iou_loss.py    From CenterNet with Apache License 2.0
def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        if weight is not None and not torch.any(weight > 0):
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        loss = self.loss_weight * linear_iou_loss(
            pred,
            target,
            weight,
            eps=self.eps,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
        return loss 
Example #30
Source File: test_cross_batch_memory.py    From pytorch-metric-learning with MIT License
def test_input_indices_tuple(self):
        batch_size = 32
        pair_miner = PairMarginMiner(pos_margin=0, neg_margin=1, use_similarity=False)
        triplet_miner = TripletMarginMiner(margin=1)
        self.loss = CrossBatchMemory(loss=ContrastiveLoss(), embedding_size=self.embedding_size, memory_size=self.memory_size)
        for i in range(30):
            embeddings = torch.randn(batch_size, self.embedding_size)
            labels = torch.arange(batch_size)
            self.loss(embeddings, labels)
            for curr_miner in [pair_miner, triplet_miner]:
                input_indices_tuple = curr_miner(embeddings, labels)
                all_labels = torch.cat([labels, self.loss.label_memory], dim=0)
                a1ii, pii, a2ii, nii = lmu.convert_to_pairs(input_indices_tuple, labels)
                a1i, pi, a2i, ni = lmu.get_all_pairs_indices(labels, self.loss.label_memory)
                a1, p, a2, n = self.loss.create_indices_tuple(batch_size, embeddings, labels, self.loss.embedding_memory, self.loss.label_memory, input_indices_tuple)
                self.assertTrue(not torch.any((all_labels[a1]-all_labels[p]).bool()))
                self.assertTrue(torch.all((all_labels[a2]-all_labels[n]).bool()))
                self.assertTrue(len(a1) == len(a1i)+len(a1ii))
                self.assertTrue(len(p) == len(pi)+len(pii))
                self.assertTrue(len(a2) == len(a2i)+len(a2ii))
                self.assertTrue(len(n) == len(ni)+len(nii))