Python fast_rcnn.bbox_transform.clip_boxes() Examples

The following are 3 code examples of fast_rcnn.bbox_transform.clip_boxes(), taken from open-source projects. You can go to the original project or source file by following the links above each example, or check out all available functions/classes of the fast_rcnn.bbox_transform module.
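All three examples call clip_boxes(boxes, im_shape) to clamp decoded box coordinates to the image boundary before scoring or NMS. For orientation, here is a minimal sketch of the conventional py-faster-rcnn-style implementation; the exact module source may differ in detail:

import numpy as np

def clip_boxes(boxes, im_shape):
    """Clip an (N, 4*K) array of boxes to lie inside an image of shape (height, width, ...)."""
    # x1 in [0, width - 1]
    boxes[:, 0::4] = np.maximum(np.minimum(boxes[:, 0::4], im_shape[1] - 1), 0)
    # y1 in [0, height - 1]
    boxes[:, 1::4] = np.maximum(np.minimum(boxes[:, 1::4], im_shape[0] - 1), 0)
    # x2 in [0, width - 1]
    boxes[:, 2::4] = np.maximum(np.minimum(boxes[:, 2::4], im_shape[1] - 1), 0)
    # y2 in [0, height - 1]
    boxes[:, 3::4] = np.maximum(np.minimum(boxes[:, 3::4], im_shape[0] - 1), 0)
    return boxes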
Example #1
Source File: faster_rcnn.py    From faster_rcnn_pytorch with MIT License
def interpret_faster_rcnn(self, cls_prob, bbox_pred, rois, im_info, im_shape, nms=True, clip=True, min_score=0.0):
        # find class
        scores, inds = cls_prob.data.max(1)
        scores, inds = scores.cpu().numpy(), inds.cpu().numpy()

        keep = np.where((inds > 0) & (scores >= min_score))
        scores, inds = scores[keep], inds[keep]

        # Apply bounding-box regression deltas
        keep = keep[0]
        box_deltas = bbox_pred.data.cpu().numpy()[keep]
        box_deltas = np.asarray([
            box_deltas[i, (inds[i] * 4): (inds[i] * 4 + 4)] for i in range(len(inds))
        ], dtype=np.float64)  # keep only the 4 deltas of each box's predicted class
        boxes = rois.data.cpu().numpy()[keep, 1:5] / im_info[0][2]
        pred_boxes = bbox_transform_inv(boxes, box_deltas)
        if clip:
            pred_boxes = clip_boxes(pred_boxes, im_shape)

        # nms
        if nms and pred_boxes.shape[0] > 0:
            pred_boxes, scores, inds = nms_detections(pred_boxes, scores, 0.3, inds=inds)

        return pred_boxes, scores, self.classes[inds] 
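Each example pairs clip_boxes with bbox_transform_inv, which decodes the regression deltas into absolute box coordinates before clipping. A condensed sketch of that conventional decoding step (not necessarily the exact module source) looks like this:

import numpy as np

def bbox_transform_inv(boxes, deltas):
    """Decode (dx, dy, dw, dh) deltas relative to proposal boxes of shape (N, 4)."""
    widths = boxes[:, 2] - boxes[:, 0] + 1.0
    heights = boxes[:, 3] - boxes[:, 1] + 1.0
    ctr_x = boxes[:, 0] + 0.5 * widths
    ctr_y = boxes[:, 1] + 0.5 * heights

    dx, dy = deltas[:, 0::4], deltas[:, 1::4]
    dw, dh = deltas[:, 2::4], deltas[:, 3::4]

    pred_ctr_x = dx * widths[:, np.newaxis] + ctr_x[:, np.newaxis]
    pred_ctr_y = dy * heights[:, np.newaxis] + ctr_y[:, np.newaxis]
    pred_w = np.exp(dw) * widths[:, np.newaxis]
    pred_h = np.exp(dh) * heights[:, np.newaxis]

    pred_boxes = np.zeros(deltas.shape, dtype=deltas.dtype)
    pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w  # x1
    pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h  # y1
    pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w  # x2
    pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h  # y2
    return pred_boxes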
Example #2
Source File: test.py    From scene-graph-TF-release with MIT License
def im_detect(sess, net, inputs, im, boxes, bbox_reg, multi_iter):
    blobs, im_scales = _get_blobs(im, boxes)

    relations = []
    for i in range(boxes.shape[0]):
        for j in range(boxes.shape[0]):
            if i != j:
                relations.append([i, j])
    relations = np.array(relations, dtype=np.int32) # all possible combinations
    num_roi = blobs['rois'].shape[0]
    num_rel = relations.shape[0]

    inputs_feed = data_utils.create_graph_data(num_roi, num_rel, relations)

    feed_dict = {inputs['ims']: blobs['data'],
                 inputs['rois']: blobs['rois'],
                 inputs['relations']: relations,
                 net.keep_prob: 1}

    for k in inputs_feed:
        feed_dict[inputs[k]] = inputs_feed[k]

    # compute relation rois
    feed_dict[inputs['rel_rois']] = \
        data_utils.compute_rel_rois(num_rel, blobs['rois'], relations)

    ops = {}

    ops['bbox_deltas'] = net.bbox_pred_output(multi_iter)
    ops['rel_probs'] = net.rel_pred_output(multi_iter)
    ops['cls_probs'] = net.cls_pred_output(multi_iter)

    ops_value = sess.run(ops, feed_dict=feed_dict)

    out_dict = {}
    for mi in multi_iter:
        rel_probs_flat = ops_value['rel_probs'][mi]
        rel_probs = np.zeros([num_roi, num_roi, rel_probs_flat.shape[1]])
        for i, rel in enumerate(relations):
            rel_probs[rel[0], rel[1], :] = rel_probs_flat[i, :]

        cls_probs = ops_value['cls_probs'][mi]

        if bbox_reg:
            # Apply bounding-box regression deltas
            pred_boxes = bbox_transform_inv(boxes, ops_value['bbox_deltas'][mi])
            pred_boxes = clip_boxes(pred_boxes, im.shape)
        else:
            # Simply repeat the boxes, once for each class
            pred_boxes = np.tile(boxes, (1, cls_probs.shape[1]))

        out_dict[mi] = {'scores': cls_probs.copy(),
                        'boxes': pred_boxes.copy(),
                        'relations': rel_probs.copy()}
    return out_dict 
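Because the pred_boxes returned by im_detect holds one 4-column block per class (shape (num_roi, 4 * num_classes)), a typical caller slices out the block for a single class before thresholding or NMS. The snippet below is only illustrative; the class index j, the score threshold, and the variable names are assumptions rather than part of the original test.py:

# out = im_detect(sess, net, inputs, im, boxes, bbox_reg=True, multi_iter=[last_iter])
res = out[last_iter]                       # hypothetical: results of the last refinement iteration
j = 1                                      # hypothetical foreground class index
scores_j = res['scores'][:, j]
boxes_j = res['boxes'][:, j * 4:(j + 1) * 4]
keep = scores_j > 0.05                     # assumed score threshold
dets_j = np.hstack([boxes_j[keep], scores_j[keep, np.newaxis]])  # (x1, y1, x2, y2, score)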
Example #3
Source File: makebboxproposals.py    From orientation-aware-firearm-detection with MIT License
def forward(self, bottom, top):
    # assert bottom[0].data.shape[0] == 1, \
    #     'Only single item batches are supported'

    # bottoms: RPN proposal rois, per-class bbox deltas, im_info, and class scores
    rpn_boxes_Actual = bottom[0].data
    box_deltas = bottom[1].data
    im_info = bottom[2].data
    scores = bottom[3].data

    # im_info holds (height, width, scale); recover the unscaled image shape
    im_scales = im_info[0][2]
    im_shape = np.array([im_info[0][0], im_info[0][1]]) / im_scales

    # drop the batch-index column of the rois and undo the input scaling
    rpn_boxes = rpn_boxes_Actual[:, 1:5] / im_scales
    pred_boxes = bbox_transform_inv(rpn_boxes, box_deltas)
    pred_boxes = clip_boxes(pred_boxes, im_shape)

    # highest-scoring class for each proposal
    cls_idx = np.argmax(scores, axis=1)

    # keep the 4 regressed coordinates of the winning class;
    # column 0 stays zero as the batch index, matching the roi layout
    temp = np.zeros((len(cls_idx), 5))
    for idx in range(len(cls_idx)):
        temp[idx, 1:] = pred_boxes[idx, cls_idx[idx] * 4:(cls_idx[idx] + 1) * 4]

    # scale back to the network input resolution
    pred_boxes = temp * im_scales

    top[0].reshape(*(pred_boxes.shape))
    top[0].data[...] = pred_boxes
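As a quick standalone check of the clipping behaviour these examples rely on, a tiny demo with dummy data might look like the following; the box values and image shape are illustrative only:

import numpy as np
from fast_rcnn.bbox_transform import clip_boxes

# Two boxes, one of which spills outside a 480x640 image.
boxes = np.array([[10.0, 20.0, 100.0, 200.0],
                  [-15.0, 300.0, 700.0, 520.0]])
im_shape = (480, 640)  # (height, width)

clipped = clip_boxes(boxes, im_shape)
print(clipped)
# Under the usual convention the second box is clamped to [0, 300, 639, 479].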