Python mean IoU
25 Python code examples are found related to "mean iou".
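Mean IoU (mean intersection over union) averages, over all classes, the ratio between the overlap and the union of the predicted and ground-truth regions for each class. As a plain-NumPy reference point for the project-specific examples below (not taken from any of the listed repositories; function name and toy inputs are hypothetical), a minimal sketch might look like this:

import numpy as np

def reference_mean_iou(pred, target, num_classes):
    """Mean IoU over integer class maps; classes absent from both are skipped."""
    ious = []
    for c in range(num_classes):
        inter = np.logical_and(pred == c, target == c).sum()
        union = np.logical_or(pred == c, target == c).sum()
        if union > 0:
            ious.append(inter / union)
    return float(np.mean(ious)) if ious else 0.0

# Toy invocation (hypothetical data):
pred = np.array([0, 0, 1, 2, 2])
target = np.array([0, 1, 1, 2, 2])
print(reference_mean_iou(pred, target, num_classes=3))  # (0.5 + 0.5 + 1.0) / 3 ~= 0.667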
Example 1
Source File: metric.py From pytorch_geometric with MIT License | 6 votes |
def mean_iou(pred, target, num_classes, batch=None):
    r"""Computes the mean intersection over union score of predictions.

    Args:
        pred (LongTensor): The predictions.
        target (LongTensor): The targets.
        num_classes (int): The number of classes.
        batch (LongTensor): The assignment vector which maps each pred-target
            pair to an example.

    :rtype: :class:`Tensor`
    """
    i, u = intersection_and_union(pred, target, num_classes, batch)
    iou = i.to(torch.float) / u.to(torch.float)
    iou[torch.isnan(iou)] = 1
    iou = iou.mean(dim=-1)
    return iou
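A possible way to call this function, assuming an older torch_geometric release that still exports mean_iou (and its intersection_and_union helper) from torch_geometric.utils; recent releases have removed it:

import torch
from torch_geometric.utils import mean_iou  # available in older releases only

pred = torch.tensor([0, 1, 1, 2])    # predicted class per node/point
target = torch.tensor([0, 1, 2, 2])  # ground-truth class per node/point
print(mean_iou(pred, target, num_classes=3))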
Example 2
Source File: evaluate_sem_seg.py From deep-functional-dictionaries with MIT License | 6 votes |
def evaluate_mean_IoU(data, L, L_mask, L_pred, outputs):
    outputs['Category mIoU'] = {}

    # Per-category mean IoU
    for category_id in data.unique_category_ids:
        if data.category_names is not None:
            assert(category_id < len(data.category_names))
            category_name = data.category_names[category_id]
        else:
            category_name = '{:02d}'.format(category_id)

        mask = np.where(data.category_ids == category_id)[0]
        category_mean_IoU = compute_mean_IoU(
            L[mask], L_mask[mask], L_pred[mask])
        outputs['Category mIoU'][category_name] = category_mean_IoU

    # Total mean
    mean_IoU = compute_mean_IoU(L, L_mask, L_pred)
    outputs['Mean mIoU'] = mean_IoU

    return outputs
Example 3
Source File: loss_func.py From Keras-FCN with MIT License | 6 votes |
def mean_IoU(y_true, y_pred):
    s = K.shape(y_true)

    # reshape such that w and h dim are multiplied together
    y_true_reshaped = K.reshape(y_true, tf.stack([-1, s[1]*s[2], s[-1]]))
    y_pred_reshaped = K.reshape(y_pred, tf.stack([-1, s[1]*s[2], s[-1]]))

    # correctly classified
    clf_pred = K.one_hot(K.argmax(y_pred_reshaped), nb_classes=s[-1])
    equal_entries = K.cast(K.equal(clf_pred, y_true_reshaped), dtype='float32') * y_true_reshaped

    intersection = K.sum(equal_entries, axis=1)
    union_per_class = K.sum(y_true_reshaped, axis=1) + K.sum(y_pred_reshaped, axis=1)

    iou = intersection / (union_per_class - intersection)
    iou_mask = tf.is_finite(iou)
    iou_masked = tf.boolean_mask(iou, iou_mask)

    return K.mean(iou_masked)
Example 4
Source File: evaluate_sem_seg.py From deep-functional-dictionaries with MIT License | 6 votes |
def compute_mean_IoU(L, L_mask, L_pred):
    n_data = L_pred.shape[0]
    n_labels = L_pred.shape[2]

    sum_mean_IoU = 0.

    for k in range(n_data):
        sum_IoU = 0.
        count = 0
        for i in range(n_labels):
            # NOTE:
            # Ignore when the label does not exist in the shape.
            if not L_mask[k,i]:
                continue

            intersection = np.sum(np.logical_and(L[k,:,i], L_pred[k,:,i]))
            union = np.sum(np.logical_or(L[k,:,i], L_pred[k,:,i]))
            #assert(union > 0.)
            IoU = float(intersection) / float(union) if union > 0. else 1.
            sum_IoU += IoU
            count += 1

        sum_mean_IoU += sum_IoU / float(count)

    mean_IoU = sum_mean_IoU / float(n_data)
    return mean_IoU
Example 5
Source File: metrics.py From pytorch-template with MIT License | 6 votes |
def mean_iou_50_to_95(outputs: torch.Tensor, labels: torch.Tensor, thresh=None, eps=1e-7, reduce=True):
    if thresh is not None:
        outputs = outputs > thresh
    outputs = outputs.squeeze(1)
    labels = labels.squeeze(1).byte()

    intersection = (outputs & labels).sum(dim=[1, 2]).float()
    union = (outputs | labels).sum(dim=[1, 2]).float()

    iou = (intersection + eps) / (union + eps)
    thresholded = torch.clamp(20 * (iou - 0.5), 0, 10).ceil() / 10

    if reduce:
        thresholded = thresholded.mean()
    return thresholded
Example 6
Source File: combine_single_object_predictions_crf.py From PReMVOS with MIT License | 6 votes |
def calculate_mean_iou(seq, fn, result, n_objects):
    if "test" in SPLIT:
        return 0.0
    mask_fn = DAVIS2017_DIR + "Annotations/480p/" + seq + "/" + fn.replace(".pickle", ".png")
    groundtruth_mask = numpy.array(Image.open(mask_fn))
    iou_sum = 0.0
    for n in range(n_objects):
        I = numpy.logical_and(result == n + 1, groundtruth_mask == n + 1).sum()
        U = numpy.logical_or(result == n + 1, groundtruth_mask == n + 1).sum()
        if U == 0:
            iou = 1.0
        else:
            iou = float(I) / U
        iou_sum += iou
    return iou_sum / n_objects
Example 7
Source File: metric.py From Face-skin-hair-segmentaiton-and-skin-color-evaluation with Apache License 2.0 | 6 votes |
def mean_iou(y_true, y_pred, cls_num=CLS_NUM):
    result = 0
    nc = tf.cast(tf.shape(y_true)[-1], tf.float32)
    for i in range(cls_num):
        # nii = number of pixels of class i predicted to belong to class i
        nii = tf.reduce_sum(tf.round(tf.multiply(
            y_true[:, :, :, i], y_pred[:, :, :, i])))
        ti = tf.reduce_sum(y_true[:, :, :, i])  # number of pixels of class i
        loc_sum = 0
        for j in range(cls_num):
            # number of pixels of class j predicted to belong to class i
            nji = tf.reduce_sum(tf.round(tf.multiply(
                y_true[:, :, :, j], y_pred[:, :, :, i])))
            loc_sum += nji
        result += nii / (ti - nii + loc_sum)
    return (1 / nc) * result
Example 8
Source File: miou.py From Keras-LinkNet with MIT License | 5 votes |
def mean_iou(self, y_true, y_pred):
    """The metric function to be passed to the model.

    Args:
        y_true (tensor): True labels.
        y_pred (tensor): Predictions of the same shape as y_true.

    Returns:
        The mean intersection over union as a tensor.
    """
    # Wraps _mean_iou function and uses it as a TensorFlow op.
    # Takes numpy arrays as its arguments and returns numpy arrays as
    # its outputs.
    return tf.py_func(self._mean_iou, [y_true, y_pred], tf.float32)
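tf.py_func is a TensorFlow 1.x API. A minimal self-contained sketch of the same wrapping idea with its TF 2.x successor, tf.numpy_function, might look like this (the _mean_iou helper below is a simple binary-IoU stand-in, not the one from Keras-LinkNet):

import numpy as np
import tensorflow as tf

def _mean_iou(y_true, y_pred):
    # NumPy-only computation: binarize the prediction and take IoU.
    y_true = y_true.astype(bool)
    y_pred = y_pred > 0.5
    inter = np.logical_and(y_true, y_pred).sum()
    union = np.logical_or(y_true, y_pred).sum()
    return np.float32(1.0 if union == 0 else inter / union)

def mean_iou(y_true, y_pred):
    # Wrap the NumPy helper as a TensorFlow op, as in the example above.
    return tf.numpy_function(_mean_iou, [y_true, y_pred], tf.float32)

y_true = tf.constant([[0., 1., 1., 0.]])
y_pred = tf.constant([[0.1, 0.9, 0.4, 0.2]])
print(mean_iou(y_true, y_pred).numpy())  # 0.5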
Example 9
Source File: metric.py From Visual-Template-Free-Form-Parsing with GNU General Public License v3.0 | 5 votes |
def meanIOU(y_output, y_target):
    assert len(y_output) == len(y_target)
    epsilon = 0.001
    iouSum = 0
    for out, targ in zip(y_output, y_target):
        binary = out > 0  # torch.where(out>0,1,0)
        # binary = torch.round(y_input)  # threshold at 0.5
        intersection = (binary * targ).sum()
        union = (binary + targ).sum() - intersection
        iouSum += (intersection + epsilon) / (union + epsilon)
    return iouSum / float(len(y_output))
Example 10
Source File: postprocess0.py From 2018DSB with MIT License | 5 votes |
def compute_mean_iou(overlaps, return_list=False):
    miou = []
    overlaps *= (overlaps >= .5)
    for step in np.arange(0.5, 1, 0.05):
        overlap_step = overlaps >= step
        tp = np.sum(np.sum(overlap_step, axis=0) > 0)
        fp = np.sum(np.sum(overlap_step, axis=0) == 0)
        fn = np.sum(np.sum(overlap_step, axis=-1) == 0)
        miou.append(tp / (tp + fp + fn))
    if return_list:
        return miou
    else:
        return np.mean(miou)
Example 11
Source File: segmentation.py From homura with Apache License 2.0 | 5 votes |
def mean_iou(input: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
    """ Mean IoU

    :param input: logits (`BxCxHxW`)
    :param target: target in LongTensor (`BxHxW`)
    :return:
    """
    return classwise_iou(input, target).mean()
Example 12
Source File: metrics.py From pytorch-template with MIT License | 5 votes |
def mean_iou(outputs, labels, n_classes=19):
    preds = outputs.argmax(dim=1)
    intersection, union = intersection_and_union(preds, labels, n_classes=n_classes)
    return np.mean(intersection / (union + 1e-16))
Example 13
Source File: confusion_matrix.py From pytorch-faster-rcnn with MIT License | 5 votes |
def mean_iou(self):
    '''meanIoU: ignore the label that isn't in imgs of gt'''
    total_iou = 0
    count = 0
    for i in range(self.size):
        I = self.diag[i]
        U = self.act_sum[i] + self.pre_sum[i] - I
        if U > 0:
            total_iou += I / U
            count += 1
    return total_iou / count
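The attributes used above (diag, act_sum, pre_sum, size) appear to be the diagonal, row sums, column sums, and class count of a confusion matrix maintained elsewhere in the class. A standalone NumPy illustration of the same per-class IoU computation, with a made-up matrix, could look like this:

import numpy as np

# Hypothetical 3-class confusion matrix: rows = ground truth, cols = prediction.
cm = np.array([[50,  2,  0],
               [ 3, 40,  5],
               [ 0,  4, 60]])

diag = np.diag(cm)                              # correctly classified pixels per class
union = cm.sum(axis=1) + cm.sum(axis=0) - diag  # actual + predicted - intersection
valid = union > 0                               # ignore classes absent from both
print((diag[valid] / union[valid]).mean())      # mean IoU over the present classes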
Example 14
Source File: analyze.py From MonoGRNet with Apache License 2.0 | 5 votes |
def get_mean_iou(n):
    """
    Get mean intersection over union from a confusion matrix n.

    Parameters
    ----------
    n : dict
        Confusion matrix which has integer keys 0, ..., nb_classes - 1;
        an entry n[i][j] is the count how often class i was classified
        as class j.

    Returns
    -------
    float
        mean intersection over union (in [0, 1])

    Examples
    --------
    >>> n = {0: {0: 10, 1: 2}, 1: {0: 5, 1: 83}}
    >>> get_mean_iou(n)
    0.7552287581699346
    """
    t = []
    k = len(n[0])
    for i in range(k):
        t.append(sum([n[i][j] for j in range(k)]))
    return (1.0 / k) * sum([float(n[i][i]) / (t[i] - n[i][i] + sum([n[j][i] for j in range(k)]))
                            for i in range(k)])
Example 15
Source File: metrics.py From RMI with MIT License | 5 votes |
def mean_iou_np(self, is_show_per_class=False):
    """compute mean iou with numpy"""
    sum_over_row = np.sum(self.confusion_matrix, axis=0).astype(float)
    sum_over_col = np.sum(self.confusion_matrix, axis=1).astype(float)
    cm_diag = np.diagonal(self.confusion_matrix).astype(float)
    denominator = sum_over_row + sum_over_col - cm_diag

    # The mean is only computed over classes that appear in the
    # label or prediction tensor. If the denominator is 0, we need to
    # ignore the class.
    num_valid_entries = np.sum((denominator != 0).astype(float))

    # If the value of the denominator is 0, set it to 1 to avoid
    # zero division.
    denominator = np.where(denominator > 0, denominator,
                           np.ones_like(denominator))
    ious = cm_diag / denominator

    if is_show_per_class:
        print('\nIntersection over Union for each class:')
        for i, iou in enumerate(ious):
            print(' class {}: {:.4f}'.format(i, iou))

    # If the number of valid entries is 0 (no classes) we return 0.
    m_iou = np.where(num_valid_entries > 0,
                     np.sum(ious) / num_valid_entries,
                     0)
    m_iou = float(m_iou)
    if is_show_per_class:
        print('mean Intersection over Union: {:.4f}'.format(float(m_iou)))
    return m_iou
Example 16
Source File: metrics.py From keras-fcn with MIT License | 5 votes |
def Mean_IoU(classes):
    def mean_iou(y_true, y_pred):
        mean_iou, op = tf.metrics.mean_iou(y_true, y_pred, classes)
        return mean_iou
    _initialize_variables()
    return mean_iou
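Note that tf.metrics.mean_iou is the TensorFlow 1.x streaming metric, which is why the surrounding code has to initialize variables. In TensorFlow 2.x the closest built-in equivalent is tf.keras.metrics.MeanIoU, which expects class indices rather than probabilities; a minimal sketch:

import tensorflow as tf

m = tf.keras.metrics.MeanIoU(num_classes=2)
m.update_state([0, 0, 1, 1], [0, 1, 0, 1])  # ground-truth and predicted class indices
print(m.result().numpy())                    # 0.333... for this toy input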
Example 17
Source File: metrics.py From seismic-deeplearning with MIT License | 5 votes |
def mean_iou(num_classes, output_transform=lambda x: x, device=None, ignore_index=None):
    """Calculates mean intersection-over-union

    Args:
        num_classes (int): number of classes
        output_transform (callable, optional): a callable that is used to transform the
            output into the form expected by the metric.

    Returns:
        MetricsLambda
    """
    cm = ignite.metrics.ConfusionMatrix(num_classes=num_classes,
                                        output_transform=output_transform,
                                        device=device)
    return ignite.metrics.mIoU(cm, ignore_index=ignore_index)
Example 18
Source File: evaluation.py From blitznet with MIT License | 5 votes |
def compute_mean_iou(self):
    iou = self.detector.get_mean_iou()
    print(iou)
    log.info("\n Mean IoU is %f", iou)
    return iou
Example 19
Source File: eval.py From Single-Human-Parsing-LIP with MIT License | 5 votes |
def get_mean_acc_and_IoU(pred, gt, numClass):
    imPred = pred.copy()
    imLabel = gt.copy()

    imPred += 1
    imLabel += 1
    imPred = imPred * (imLabel > 0)

    # Compute area intersection:
    intersection = imPred * (imPred == imLabel)
    (area_intersection, _) = np.histogram(
        intersection, bins=numClass, range=(1, numClass))

    # Compute area union:
    (area_pred, _) = np.histogram(imPred, bins=numClass, range=(1, numClass))
    (area_label, _) = np.histogram(imLabel, bins=numClass, range=(1, numClass))
    area_union = area_pred + area_label - area_intersection

    # Remove classes from unlabeled pixels in gt image.
    # We should not penalize detections in unlabeled portions of the image.
    valid = area_label > 0

    # Compute mean acc.
    classes_acc = area_intersection / (area_label + 1e-10)
    mean_acc = np.average(classes_acc, weights=valid)

    # Compute intersection over union:
    IoU = area_intersection / (area_union + 1e-10)
    mean_IoU = np.average(IoU, weights=valid)

    return mean_acc, mean_IoU
Example 20
Source File: eval.py From Single-Human-Parsing-LIP with MIT License | 5 votes |
def get_mean_IoU(pred, gt, numClass):
    imPred = pred.copy()
    imLabel = gt.copy()

    imPred += 1
    imLabel += 1
    imPred = imPred * (imLabel > 0)

    # Compute area intersection:
    intersection = imPred * (imPred == imLabel)
    (area_intersection, _) = np.histogram(
        intersection, bins=numClass, range=(1, numClass))

    # Compute area union:
    (area_pred, _) = np.histogram(imPred, bins=numClass, range=(1, numClass))
    (area_label, _) = np.histogram(imLabel, bins=numClass, range=(1, numClass))
    area_union = area_pred + area_label - area_intersection

    # Remove classes from unlabeled pixels in gt image.
    # We should not penalize detections in unlabeled portions of the image.
    valid = area_label > 0

    # Compute intersection over union:
    IoU = area_intersection / (area_union + 1e-10)
    mean_IoU = np.average(IoU, weights=valid)

    return mean_IoU
Example 21
Source File: evaluate.py From deep-functional-dictionaries with MIT License | 5 votes |
def evaluate_mean_IoU_after_label_basis_matching(
        data, S_mask, S_to_L, IoU, outputs):
    outputs['Category mIoU after matching'] = {}

    # Per-category mean IoU after matching
    count = 0
    sum_mean_IoU = 0.

    for category_id in data.unique_category_ids:
        if data.category_names is not None:
            assert(category_id < len(data.category_names))
            category_name = data.category_names[category_id]
        else:
            category_name = '{:02d}'.format(category_id)

        mask = np.where(data.category_ids == category_id)[0]
        category_mean_IoU = compute_mean_IoU_after_label_basis_matching(
            S_mask[mask], S_to_L[mask], IoU[mask])
        outputs['Category mIoU after matching'][category_name] = \
            category_mean_IoU

        category_count = np.sum(mask)
        count += category_count
        sum_mean_IoU += (category_mean_IoU * category_count)

    # Total mean IoU
    mean_IoU = sum_mean_IoU / float(count)
    outputs['Mean mIoU after matching'] = mean_IoU

    return outputs
Example 22
Source File: metrics.py From Awesome-Segmentations with MIT License | 4 votes |
def mean_iou(y_true, y_pred):
    prec = []
    for t in np.arange(0.5, 1.0, 0.05):
        y_pred_ = tf.to_int32(y_pred > t)
        score, up_opt = tf.metrics.mean_iou(y_true, y_pred_, 2)
        K.get_session().run(tf.local_variables_initializer())
        with tf.control_dependencies([up_opt]):
            score = tf.identity(score)
        prec.append(score)
    return K.mean(K.stack(prec), axis=0)
Example 23
Source File: evaluate.py From deep-functional-dictionaries with MIT License | 4 votes |
def compute_mean_IoU_after_label_basis_matching(S_mask, S_to_L, IoU):
    n_data = IoU.shape[0]
    n_seg_ids = IoU.shape[1]
    K = IoU.shape[2]

    # Find the best mapping from labels to bases.
    # NOTE:
    # Temporarily set (max label + 1) as the number of labels.
    n_labels = np.amax(S_to_L) + 1

    label_to_basis_sum_IoU = np.zeros((n_labels, K))
    label_counts = np.zeros(n_labels, dtype=int)

    for k in range(n_data):
        for i in range(n_seg_ids):
            # NOTE:
            # Ignore when the label does not exist in the shape.
            if not S_mask[k,i]:
                assert(np.all(IoU[k,i,:] == 0.))
                continue

            label = S_to_L[k,i]
            label_counts[label] += 1
            for j in range(K):
                label_to_basis_sum_IoU[label,j] += IoU[k,i,j]

    label_to_basis_mean_IoU = label_to_basis_sum_IoU / \
        np.expand_dims(label_counts, -1)

    labels = np.where(label_counts > 0)[0]
    label_to_basis_mean_IoU = label_to_basis_mean_IoU[labels, :]

    row_ind, col_ind = linear_sum_assignment(-label_to_basis_mean_IoU)

    label_to_basis = {}
    for (i, j) in zip(row_ind, col_ind):
        label_to_basis[labels[i]] = j

    sum_mean_IoU = 0.

    for k in range(n_data):
        assert(K >= np.sum(S_mask[k]))
        sum_IoU = 0.
        count = 0
        for i in range(n_seg_ids):
            if not S_mask[k,i]:
                continue
            label = S_to_L[k,i]
            j = label_to_basis[label]
            sum_IoU += IoU[k,i,j]
            count += 1
        sum_mean_IoU += sum_IoU / float(count)

    mean_IoU = sum_mean_IoU / float(n_data)
    return mean_IoU
Example 24
Source File: mean_IoU.py From kaggle-dsb2018 with Apache License 2.0 | 4 votes |
def mean_IoU(Y_true, Y_pred):
    """
    Calculate the mean IoU score between two lists of labeled masks.

    :param Y_true: a list of labeled masks (numpy arrays) - the ground truth
    :param Y_pred: a list of labeled predicted masks (numpy arrays) for images
        with the original dimensions
    :return: mean IoU score for corresponding images
    """
    image_precs = []
    for y_true, y_pred in zip(Y_true, Y_pred):
        true_objects = len(np.unique(y_true))
        pred_objects = len(np.unique(y_pred))

        # Compute intersection between all objects
        intersection = np.histogram2d(y_true.flatten(), y_pred.flatten(),
                                      bins=(true_objects, pred_objects))[0]

        # Compute areas (needed for finding the union between all objects)
        area_true = np.histogram(y_true, bins=true_objects)[0]
        area_pred = np.histogram(y_pred, bins=pred_objects)[0]
        area_true = np.expand_dims(area_true, -1)
        area_pred = np.expand_dims(area_pred, 0)

        # Compute union
        union = area_true + area_pred - intersection

        # Exclude background from the analysis
        intersection = intersection[1:, 1:]
        union = union[1:, 1:]
        union[union == 0] = 1e-9

        # Compute the intersection over union
        iou = intersection / union

        # Precision helper function
        def precision_at(threshold, iou):
            matches = iou > threshold
            true_positives = np.sum(matches, axis=1) == 1   # Correct objects
            false_positives = np.sum(matches, axis=0) == 0  # Missed objects
            false_negatives = np.sum(matches, axis=1) == 0  # Extra objects
            tp, fp, fn = np.sum(true_positives), np.sum(false_positives), np.sum(false_negatives)
            return tp, fp, fn

        # Loop over IoU thresholds
        prec = []
        for t in np.arange(0.5, 1.0, 0.05):
            tp, fp, fn = precision_at(t, iou)
            p = tp / (tp + fp + fn)
            prec.append(p)

        image_precs.append(prec)

    return [np.mean(image_precs), image_precs]