Python cv2.applyColorMap() Examples
The following are 30 code examples of cv2.applyColorMap(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module cv2, or try the search function.
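cv2.applyColorMap() maps an 8-bit single-channel image onto a BGR false-color image using one of OpenCV's built-in colormaps (cv2.COLORMAP_JET, cv2.COLORMAP_RAINBOW, cv2.COLORMAP_HSV, ...). Before the project examples, here is a minimal sketch of the colorize-and-overlay pattern most of them follow; the synthetic arrays below are stand-ins invented for illustration, not taken from any of the listed projects:

import cv2
import numpy as np

# Synthetic single-channel "heat" map in 0..255 (uint8), the input type applyColorMap expects.
heat = np.tile(np.arange(256, dtype=np.uint8), (64, 1))

# Map grayscale intensities to a BGR visualization with the JET colormap.
colored = cv2.applyColorMap(heat, cv2.COLORMAP_JET)

# Typical overlay step used by the examples below: blend the colorized map
# onto an image of the same size and dtype.
base = np.full_like(colored, 128)  # stand-in for a real photo
overlay = cv2.addWeighted(base, 0.5, colored, 0.5, 0)
cv2.imwrite('colormap_overlay.png', overlay)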
Example #1
Source File: h5_test.py From keras-image-segmentation with MIT License (6 votes)

def read_h5py_example():
    h5_in = h5py.File(os.path.join(dir_path, 'data.h5'), 'r')
    print (h5_in.keys())
    print (h5_in['train']['image'].dtype)
    print (h5_in['train']['image'][0].shape)

    image_size = h5_in['train']['image'].attrs['size']
    label_size = h5_in['train']['label'].attrs['size']

    x_img = np.reshape(h5_in['train']['image'][0], tuple(image_size))
    y_img = np.reshape(h5_in['train']['label'][0], tuple(label_size))
    name = h5_in['train']['name'][0]
    print (name)

    y_img = (y_img.astype(np.float32)*255/33).astype(np.uint8)
    y_show = cv2.applyColorMap(y_img, cv2.COLORMAP_JET)
    show = cv2.addWeighted(x_img, 0.5, y_show, 0.5, 0)

    cv2.imshow("show", show)
    cv2.waitKey()
Example #2
Source File: utility.py From hmd with MIT License (6 votes)

def draw_anchors_rect(img_arr, anchor_posi, sample = 1, ratio = 1):
    ori_dtype = img_arr.dtype
    joint_num = len(anchor_posi)
    seed_arr = np.array([range(1,255,255/joint_num)]).astype(np.uint8)
    color_list = cv2.applyColorMap(seed_arr, cv2.COLORMAP_RAINBOW)[0]
    draw_arr = img_arr.astype(np.float)
    for i in range(joint_num):
        if (i%sample)!=0:
            continue
        draw_arr = draw_rect(draw_arr,
                             anchor_posi[i],
                             size = 32,
                             color = color_list[i].tolist())
    if ratio < 1:
        draw_arr = draw_arr*ratio + img_arr.astype(np.float)*(1-ratio)
    return draw_arr.astype(ori_dtype)

# write OBJ from vertex
# not tested yet
Example #3
Source File: trial.py From Pytorch-Human-Pose-Estimation with MIT License (6 votes)

def draw_heatmaps(heatmaps, image, index):
    img = image
    #print(img.max(), img.min(), img.std(), img.mean())
    img = np.array(255*img.transpose(1, 2, 0), dtype = np.uint8)
    #img = cv2.resize(img, (heatmaps.shape[1], heatmaps.shape[1]))
    #print(img.shape, img.max(), img.min(), img.mean(), img.std())
    #print(img.shape)
    #print(heatmaps.shape[0])
    for i in range(heatmaps.shape[0]):
        #current = cv2.applyColorMap(heatmaps[i, :, :], cv2.COLORMAP_JET)
        current = heatmaps[i, :, :]
        current = cv2.resize(current, (img.shape[0], img.shape[1]))
        #print(current.shape)
        #print(current.mean())
        #print(current.std())
        #print(img.max())
        plt.imshow(img)
        plt.imshow(current, alpha = 0.5)
        plt.savefig('debug/' + str(index) + '_' + str(i) + '.png')
    print("saved", str(index))
Example #4
Source File: my.py From 3D-HourGlass-Network with MIT License (6 votes)

def test_heatmaps(heatmaps,img,i):
    heatmaps=heatmaps.numpy()
    #heatmaps=np.squeeze(heatmaps)
    heatmaps=heatmaps[:,:64,:]
    heatmaps=heatmaps.transpose(1,2,0)
    print('heatmap inside shape is',heatmaps.shape)
    ## print('----------------here')
    ## print(heatmaps.shape)
    img=img.numpy()
    #img=np.squeeze(img)
    img=img.transpose(1,2,0)
    img=cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # print('heatmaps',heatmaps.shape)
    heatmaps = cv2.resize(heatmaps,(0,0), fx=4,fy=4)
    # print('heatmapsafter',heatmaps.shape)
    for j in range(0, 16):
        heatmap = heatmaps[:,:,j]
        heatmap = heatmap.reshape((256,256,1))
        heatmapimg = np.array(heatmap * 255, dtype = np.uint8)
        heatmap = cv2.applyColorMap(heatmapimg, cv2.COLORMAP_JET)
        heatmap = heatmap/255
        plt.imshow(img)
        plt.imshow(heatmap, alpha=0.5)
        plt.show()
        #plt.savefig('hmtestpadh36'+str(i)+js[j]+'.png')
Example #5
Source File: vis.py From HigherHRNet-Human-Pose-Estimation with MIT License (6 votes)

def make_heatmaps(image, heatmaps):
    heatmaps = heatmaps.mul(255)\
                       .clamp(0, 255)\
                       .byte()\
                       .cpu().numpy()

    num_joints, height, width = heatmaps.shape
    image_resized = cv2.resize(image, (int(width), int(height)))

    image_grid = np.zeros((height, (num_joints+1)*width, 3), dtype=np.uint8)

    for j in range(num_joints):
        # add_joints(image_resized, joints[:, j, :])
        heatmap = heatmaps[j, :, :]
        colored_heatmap = cv2.applyColorMap(heatmap, cv2.COLORMAP_JET)
        image_fused = colored_heatmap*0.7 + image_resized*0.3

        width_begin = width * (j+1)
        width_end = width * (j+2)
        image_grid[:, width_begin:width_end, :] = image_fused

    image_grid[:, 0:width, :] = image_resized

    return image_grid
Example #6
Source File: CThermal.py From Thermal_Image_Analysis with MIT License (6 votes)

def generate_colorbar(self, min_temp=None, max_temp=None, cmap=cv.COLORMAP_JET, height=None):
    if min_temp is None:
        min_temp = self.global_min_temp
    if max_temp is None:
        max_temp = self.global_max_temp

    cb_gray = np.arange(255,0,-1,dtype=np.uint8).reshape((255,1))
    if cmap is not None:
        cb_color = cv.applyColorMap(cb_gray, cmap)
    else:
        cb_color = cv.cvtColor(cb_gray, cv.COLOR_GRAY2BGR)
    for i in range(1,6):
        cb_color = np.concatenate( (cb_color, cb_color), axis=1 )

    if height is None:
        append_img = np.zeros( (self.thermal_image.shape[0], cb_color.shape[1]+30, 3), dtype=np.uint8 )
    else:
        append_img = np.zeros( (height, cb_color.shape[1]+30, 3), dtype=np.uint8 )

    append_img[append_img.shape[0]//2-cb_color.shape[0]//2 : append_img.shape[0]//2 - (cb_color.shape[0]//2) + cb_color.shape[0], 10 : 10 + cb_color.shape[1]] = cb_color
    cv.putText(append_img, str(min_temp), (5, append_img.shape[0]//2 - (cb_color.shape[0]//2) + cb_color.shape[0] + 30), cv.FONT_HERSHEY_PLAIN, 1, (255,0,0), 1, 8)
    cv.putText(append_img, str(max_temp), (5, append_img.shape[0]//2-cb_color.shape[0]//2-20), cv.FONT_HERSHEY_PLAIN, 1, (0,0,255), 1, 8)
    return append_img
Example #7
Source File: my.py From 3D-HourGlass-Network with MIT License (6 votes)

def test_heatmaps(heatmaps,img,i):
    heatmaps=heatmaps.numpy()
    #heatmaps=np.squeeze(heatmaps)
    heatmaps=heatmaps[:,:64,:]
    heatmaps=heatmaps.transpose(1,2,0)
    print('heatmap inside shape is',heatmaps.shape)
    ## print('----------------here')
    ## print(heatmaps.shape)
    img=img.numpy()
    #img=np.squeeze(img)
    img=img.transpose(1,2,0)
    img=cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # print('heatmaps',heatmaps.shape)
    heatmaps = cv2.resize(heatmaps,(0,0), fx=4,fy=4)
    # print('heatmapsafter',heatmaps.shape)
    for j in range(0, 16):
        heatmap = heatmaps[:,:,j]
        heatmap = heatmap.reshape((256,256,1))
        heatmapimg = np.array(heatmap * 255, dtype = np.uint8)
        heatmap = cv2.applyColorMap(heatmapimg, cv2.COLORMAP_JET)
        heatmap = heatmap/255
        plt.imshow(img)
        plt.imshow(heatmap, alpha=0.5)
        plt.show()
        #plt.savefig('hmtestpadh36'+str(i)+js[j]+'.png')
Example #8
Source File: misc.py From LightNet with MIT License (6 votes)

def save_class_activation_on_image(org_img, activation_map, file_name):
    """
    Saves cam activation map and activation map on the original image

    Args:
        org_img (PIL img): Original image
        activation_map (numpy arr): activation map (grayscale) 0-255
        file_name (str): File name of the exported image
    """
    if not os.path.exists('../results'):
        os.makedirs('../results')
    # Grayscale activation map
    path_to_file = os.path.join('../results', file_name+'_Cam_Grayscale.jpg')
    cv2.imwrite(path_to_file, activation_map)
    # Heatmap of activation map
    activation_heatmap = cv2.applyColorMap(activation_map, cv2.COLORMAP_HSV)
    path_to_file = os.path.join('../results', file_name+'_Cam_Heatmap.jpg')
    cv2.imwrite(path_to_file, activation_heatmap)
    # Heatmap on picture
    org_img = cv2.resize(org_img, (224, 224))
    img_with_heatmap = np.float32(activation_heatmap) + np.float32(org_img)
    img_with_heatmap = img_with_heatmap / np.max(img_with_heatmap)
    path_to_file = os.path.join('../results', file_name+'_Cam_On_Image.jpg')
    cv2.imwrite(path_to_file, np.uint8(255 * img_with_heatmap))
Example #9
Source File: vis_utils.py From pytorch-ppn with MIT License (6 votes)

def vis_gaussian_maps(im, gaussian_maps, stride, save_im=False,
                      save_path='./exps/preds/vis_results/gaussian_map_on_im.jpg'):
    # print 'Visualize gaussian maps'
    gm_num = gaussian_maps.shape[0]
    plot_grid_size = np.ceil(np.sqrt(gm_num))
    for gmi in range(0, gm_num):
        gaussian_map = gaussian_maps[gmi, :, :].copy()
        if gaussian_map.max() > 0:
            gaussian_map -= gaussian_map.min()
            gaussian_map /= gaussian_map.max()
        resized_gaussian_map = gaussian_map * 255
        resized_gaussian_map = cv2.resize(resized_gaussian_map, None,
                                          fx=stride, fy=stride,
                                          interpolation=cv2.INTER_LINEAR)
        resized_gaussian_map = resized_gaussian_map.astype(np.uint8)
        resized_gaussian_map = cv2.applyColorMap(resized_gaussian_map, cv2.COLORMAP_JET)
        vis_gaussian_map_im = cv2.addWeighted(resized_gaussian_map, 0.5,
                                              im.astype(np.uint8), 0.5, 0.0)
        plt.subplot(plot_grid_size, plot_grid_size, gmi + 1)
        plt.imshow(vis_gaussian_map_im[:, :, [2, 1, 0]])
        plt.title(joint_names[gmi], **{'size': '10'})
        plt.xticks([])
        plt.yticks([])
    plt.subplots_adjust(top=0.92, bottom=0.08, left=0.05, right=0.95,
                        hspace=0.35, wspace=0.15)
    if save_im:
        plt.savefig(save_path)
Example #10
Source File: visualization.py From SSENet-pytorch with MIT License (6 votes)

def color_pro(pro, img=None, mode='hwc'):
    H, W = pro.shape
    pro_255 = (pro*255).astype(np.uint8)
    pro_255 = np.expand_dims(pro_255, axis=2)
    color = cv2.applyColorMap(pro_255, cv2.COLORMAP_JET)
    color = cv2.cvtColor(color, cv2.COLOR_BGR2RGB)
    if img is not None:
        rate = 0.5
        if mode == 'hwc':
            assert img.shape[0] == H and img.shape[1] == W
            color = cv2.addWeighted(img, rate, color, 1-rate, 0)
        elif mode == 'chw':
            assert img.shape[1] == H and img.shape[2] == W
            img = np.transpose(img, (1,2,0))
            color = cv2.addWeighted(img, rate, color, 1-rate, 0)
            color = np.transpose(color, (2,0,1))
    else:
        if mode == 'chw':
            color = np.transpose(color, (2,0,1))
    return color
Example #11
Source File: grad_cam.py From face_classification with MIT License (6 votes)

def calculate_gradient_weighted_CAM(gradient_function, image):
    output, evaluated_gradients = gradient_function([image, False])
    output, evaluated_gradients = output[0, :], evaluated_gradients[0, :, :, :]
    weights = np.mean(evaluated_gradients, axis=(0, 1))
    CAM = np.ones(output.shape[0:2], dtype=np.float32)
    for weight_arg, weight in enumerate(weights):
        CAM = CAM + (weight * output[:, :, weight_arg])
    CAM = cv2.resize(CAM, (64, 64))
    CAM = np.maximum(CAM, 0)
    heatmap = CAM / np.max(CAM)

    # Return to BGR [0..255] from the preprocessed image
    image = image[0, :]
    image = image - np.min(image)
    image = np.minimum(image, 255)

    CAM = cv2.applyColorMap(np.uint8(255 * heatmap), cv2.COLORMAP_JET)
    CAM = np.float32(CAM) + np.float32(image)
    CAM = 255 * CAM / np.max(CAM)
    return np.uint8(CAM), heatmap
Example #12
Source File: grad_cam.py From Emotion with MIT License (6 votes)

def calculate_gradient_weighted_CAM(gradient_function, image):
    output, evaluated_gradients = gradient_function([image, False])
    output, evaluated_gradients = output[0, :], evaluated_gradients[0, :, :, :]
    weights = np.mean(evaluated_gradients, axis=(0, 1))
    CAM = np.ones(output.shape[0:2], dtype=np.float32)
    for weight_arg, weight in enumerate(weights):
        CAM = CAM + (weight * output[:, :, weight_arg])
    CAM = cv2.resize(CAM, (64, 64))
    CAM = np.maximum(CAM, 0)
    heatmap = CAM / np.max(CAM)

    # Return to BGR [0..255] from the preprocessed image
    image = image[0, :]
    image = image - np.min(image)
    image = np.minimum(image, 255)

    CAM = cv2.applyColorMap(np.uint8(255 * heatmap), cv2.COLORMAP_JET)
    CAM = np.float32(CAM) + np.float32(image)
    CAM = 255 * CAM / np.max(CAM)
    return np.uint8(CAM), heatmap
Example #13
Source File: h5_test.py From keras-image-segmentation with MIT License (6 votes)

def image_copy_to_dir(mode, x_paths, y_paths):
    target_path = '/run/media/tkwoo/myWorkspace/workspace/01.dataset/03.Mask_data/cityscape'
    target_path = os.path.join(target_path, mode)

    for idx in trange(len(x_paths)):
        image = cv2.imread(x_paths[idx], 1)
        mask = cv2.imread(y_paths[idx], 0)

        image = cv2.resize(image, None, fx=0.25, fy=0.25, interpolation=cv2.INTER_LINEAR)
        mask = cv2.resize(mask, None, fx=0.25, fy=0.25, interpolation=cv2.INTER_NEAREST)

        cv2.imwrite(os.path.join(target_path, 'image', os.path.basename(x_paths[idx])), image)
        cv2.imwrite(os.path.join(target_path, 'mask', os.path.basename(y_paths[idx])), mask)

        # show = image.copy()
        # mask = (mask.astype(np.float32)*255/33).astype(np.uint8)
        # mask_color = cv2.applyColorMap(mask, cv2.COLORMAP_JET)
        # show = cv2.addWeighted(show, 0.5, mask_color, 0.5, 0.0)
        # cv2.imshow('show', show)
        # key = cv2.waitKey(1)
        # if key == 27:
        #     return
Example #14
Source File: system.py From sips2_open with GNU General Public License v3.0 (6 votes)

def render(self, n_max=0, fallback_im=None):
    if self.image_scores is not None:
        im = cv2.applyColorMap((self.image_scores * 255).astype(np.uint8),
                               cv2.COLORMAP_JET)
    else:
        assert fallback_im is not None
        im = cv2.cvtColor(fallback_im, cv2.COLOR_GRAY2BGR)
    if n_max == 0:
        n_max = self.ips_rc.shape[1]
    for i in range(n_max):
        thickness_relevant_score = \
            np.clip(self.ip_scores[i], 0.2, 0.6) - 0.2
        thickness = int(thickness_relevant_score * 20)
        if type(self.scales) == np.ndarray:
            radius = int(self.scales[i] * 10)
        else:
            radius = 10
        cv2.circle(im, tuple(self.ips_rc[[1, 0], i]), radius,
                   (0, 255, 0), thickness, cv2.LINE_AA)
    return im
Example #15
Source File: misc_functions.py From aerial_mtl with BSD 3-Clause "New" or "Revised" License (6 votes)

def save_class_activation_on_image(org_img, activation_map, file_name):
    """
    Saves cam activation map and activation map on the original image

    Args:
        org_img (PIL img): Original image
        activation_map (numpy arr): activation map (grayscale) 0-255
        file_name (str): File name of the exported image
    """
    # Grayscale activation map
    path_to_file = os.path.join('../results', file_name+'_Cam_Grayscale.jpg')
    cv2.imwrite(path_to_file, activation_map)
    # Heatmap of activation map
    activation_heatmap = cv2.applyColorMap(activation_map, cv2.COLORMAP_HSV)
    path_to_file = os.path.join('../results', file_name+'_Cam_Heatmap.jpg')
    cv2.imwrite(path_to_file, activation_heatmap)
    # Heatmap on picture
    org_img = cv2.resize(org_img, (224, 224))
    img_with_heatmap = np.float32(activation_heatmap) + np.float32(org_img)
    img_with_heatmap = img_with_heatmap / np.max(img_with_heatmap)
    path_to_file = os.path.join('../results', file_name+'_Cam_On_Image.jpg')
    cv2.imwrite(path_to_file, np.uint8(255 * img_with_heatmap))
Example #16
Source File: train.py From keras-image-segmentation with MIT License (6 votes)

def train_generator(self, image_generator, mask_generator):
    # cv2.namedWindow('show', 0)
    # cv2.resizeWindow('show', 1280, 640)
    while True:
        image = next(image_generator)
        mask = next(mask_generator)
        label = self.make_regressor_label(mask).astype(np.float32)
        # print (image.dtype, label.dtype)
        # print (image.shape, label.shape)
        # exit()
        # cv2.imshow('show', image[0].astype(np.uint8))
        # cv2.imshow('label', label[0].astype(np.uint8))
        # mask = self.select_labels(mask)
        # print (image.shape)
        # print (mask.shape)
        # image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
        # mask = (mask.astype(np.float32)*255/33).astype(np.uint8)
        # mask_color = cv2.applyColorMap(mask, cv2.COLORMAP_JET)
        # print (mask_color.shape)
        # show = cv2.addWeighted(image, 0.5, mask_color, 0.5, 0.0)
        # cv2.imshow("show", show)
        # key = cv2.waitKey()
        # if key == 27:
        #     exit()
        yield (image, label)
Example #17
Source File: plot_utils.py From imips_open with GNU General Public License v3.0 (6 votes)

def tile(net_outs, rows, cols, downscale, ips_rc=None):
    assert net_outs.shape[2] == 128
    xdim = net_outs.shape[1]
    ydim = net_outs.shape[0]
    im = np.zeros([rows * ydim, cols * xdim, 3])
    for r in range(rows):
        for c in range(cols):
            im_i = cv2.applyColorMap(
                (net_outs[:, :, r * cols + c] * 255).astype(np.uint8),
                cv2.COLORMAP_JET)
            if ips_rc is not None:
                cv2.circle(im_i, tuple(ips_rc[[1, 0], r * cols + c]),
                           downscale * 5, (0, 0, 0), downscale * 3,
                           cv2.LINE_AA)
            im[r * ydim:(r + 1) * ydim, c * xdim:(c + 1) * xdim, :] = im_i
    return skimage.measure.block_reduce(im, (downscale, downscale, 1), np.max)
Example #18
Source File: sips_system.py From imips_open with GNU General Public License v3.0 (6 votes)

def render(self, n_max=0, fallback_im=None):
    if self.image_scores is not None:
        im = cv2.applyColorMap((self.image_scores * 255).astype(np.uint8),
                               cv2.COLORMAP_JET)
    else:
        assert fallback_im is not None
        im = cv2.cvtColor(fallback_im, cv2.COLOR_GRAY2BGR)
    if n_max == 0:
        n_max = self.ips_rc.shape[1]
    for i in range(n_max):
        thickness_relevant_score = \
            np.clip(self.ip_scores[i], 0.2, 0.6) - 0.2
        thickness = int(thickness_relevant_score * 20)
        if type(self.scales) == np.ndarray:
            radius = int(self.scales[i] * 10)
        else:
            radius = 10
        cv2.circle(im, tuple(self.ips_rc[[1, 0], i]), radius,
                   (0, 255, 0), thickness, cv2.LINE_AA)
    return im
Example #19
Source File: grad_cam.py From Face-and-Emotion-Recognition with MIT License (6 votes)

def calculate_gradient_weighted_CAM(gradient_function, image):
    output, evaluated_gradients = gradient_function([image, False])
    output, evaluated_gradients = output[0, :], evaluated_gradients[0, :, :, :]
    weights = np.mean(evaluated_gradients, axis=(0, 1))
    CAM = np.ones(output.shape[0:2], dtype=np.float32)
    for weight_arg, weight in enumerate(weights):
        CAM = CAM + (weight * output[:, :, weight_arg])
    CAM = cv2.resize(CAM, (64, 64))
    CAM = np.maximum(CAM, 0)
    heatmap = CAM / np.max(CAM)

    # Return to BGR [0..255] from the preprocessed image
    image = image[0, :]
    image = image - np.min(image)
    image = np.minimum(image, 255)

    CAM = cv2.applyColorMap(np.uint8(255 * heatmap), cv2.COLORMAP_JET)
    CAM = np.float32(CAM) + np.float32(image)
    CAM = 255 * CAM / np.max(CAM)
    return np.uint8(CAM), heatmap
Example #20
Source File: utils.py From PPGNet with MIT License (6 votes)

def draw_lines(imgs, lines, scores=None, width=2):
    assert len(imgs) == len(lines)
    imgs = np.uint8(imgs)
    bs = len(imgs)
    if scores is not None:
        assert len(scores) == bs
    res = []
    for b in range(bs):
        img = imgs[b].transpose((1, 2, 0))
        line = lines[b]
        if scores is None:
            score = np.zeros(len(line))
        else:
            score = scores[b]
        img = img.copy()
        for (x1, y1, x2, y2), c in zip(line, score):
            pt1, pt2 = (x1, y1), (x2, y2)
            c = tuple(cv2.applyColorMap(np.array(c * 255, dtype=np.uint8),
                                        cv2.COLORMAP_JET).flatten().tolist())
            img = cv2.line(img, pt1, pt2, c, width)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        res.append(th.from_numpy(img.transpose((2, 0, 1))))
    return res
Example #21
Source File: CThermal.py From Thermal_Image_Analysis with MIT License (6 votes)

def line_measurement(self, image, thermal_np, cmap=cv.COLORMAP_JET):
    img = image.copy()
    line, point1, point2 = CFlir.get_line(img)
    line_temps = np.zeros(len(line))

    if len(img.shape) == 3:
        gray_values = np.arange(256, dtype=np.uint8)
        color_values = map(tuple, cv.applyColorMap(gray_values, cmap).reshape(256, 3))
        color_to_gray_map = dict(zip(color_values, gray_values))
        img = np.apply_along_axis(lambda bgr: color_to_gray_map[tuple(bgr)], 2, image)

    for i in range(0, len(line)):
        line_temps[i] = thermal_np[line[i][1], line[i][0]]

    cv.line(img, point1, point2, 255, 2, 8)

    plt.subplot(1, 5, (1, 2))
    plt.imshow(img, cmap='jet')
    plt.title('Image')
    plt.subplot(1, 5, (4, 5))
    plt.plot(line_temps)
    plt.title('Distance vs Temperature')
    plt.show()

    logger.info(f'\nMin line: {np.amin(line_temps)}\nMax line: {np.amax(line_temps)}')
Example #22
Source File: vis_utils.py From pytorch-pil with MIT License (6 votes)

def vis_gaussian_maps(im, gaussian_maps, stride, save_im=False,
                      save_path='exps/preds/vis_results/gaussian_map_on_im.jpg'):
    # print 'Visualize gaussian maps'
    gm_num = gaussian_maps.shape[0]
    plot_grid_size = np.ceil(np.sqrt(gm_num))
    for gmi in range(0, gm_num):
        gaussian_map = gaussian_maps[gmi, :, :].copy()
        if gaussian_map.max() > 0:
            gaussian_map -= gaussian_map.min()
            gaussian_map /= gaussian_map.max()
        resized_gaussian_map = gaussian_map * 255
        resized_gaussian_map = cv2.resize(resized_gaussian_map, None,
                                          fx=stride, fy=stride,
                                          interpolation=cv2.INTER_LINEAR)
        resized_gaussian_map = resized_gaussian_map.astype(np.uint8)
        resized_gaussian_map = cv2.applyColorMap(resized_gaussian_map, cv2.COLORMAP_JET)
        vis_gaussian_map_im = cv2.addWeighted(resized_gaussian_map, 0.5,
                                              im.astype(np.uint8), 0.5, 0.0)
        plt.subplot(plot_grid_size, plot_grid_size, gmi + 1)
        plt.imshow(vis_gaussian_map_im[:, :, [2, 1, 0]])
        plt.title(joint_names[gmi])
        plt.xticks([])
        plt.yticks([])
    if save_im:
        plt.savefig(save_path)
Example #23
Source File: run.py From keras-steering-angle-visualizations with MIT License (6 votes)

def visualize_hypercolumns(model, original_img):
    img = np.float32(cv2.resize(original_img, (200, 66))) / 255.0
    layers_extract = [9]
    hc = extract_hypercolumns(model, layers_extract, img)
    avg = np.product(hc, axis=0)
    avg = np.abs(avg)
    avg = avg / np.max(np.max(avg))
    heatmap = cv2.applyColorMap(np.uint8(255 * avg), cv2.COLORMAP_JET)
    heatmap = np.float32(heatmap) / np.max(np.max(heatmap))
    heatmap = cv2.resize(heatmap, original_img.shape[0:2][::-1])
    both = 255 * heatmap * 0.7 + original_img
    both = both / np.max(both)
    return both
Example #24
Source File: utils.py From SceneChangeDet with MIT License (6 votes)

def various_scale_attention_weights_visualize(spatial_weights, original_img1, original_img2,
                                              save_base_path, filename):
    nchannel, height, width = spatial_weights.shape
    scale_list = ['common', 't0', 't1']
    original_imgs = [original_img1, original_img1, original_img2]
    assert len(scale_list) == len(spatial_weights)
    for idx in range(nchannel):
        height_img, width_img, channel = original_imgs[idx].shape
        scale_x = spatial_weights[idx]
        scale_name = scale_list[idx]
        scalex_x_att_map = cv2.resize(scale_x, (width_img, height_img), interpolation=cv2.INTER_LINEAR)
        scalex_x_att_map_ = cv2.applyColorMap(np.uint8(255 * scalex_x_att_map), cv2.COLORMAP_JET)
        fuse_scale_att_map = 0.6 * scalex_x_att_map_ + 0.4 * original_imgs[idx]
        cv2.imwrite(save_base_path + '_' + str(filename) + '_origin_' + str(scale_name) + '.jpg', scalex_x_att_map_)
        cv2.imwrite(save_base_path + '_' + str(filename) + '_fuse_' + str(scale_name) + '.jpg', fuse_scale_att_map)
Example #25
Source File: train.py From SceneChangeDet with MIT License (6 votes)

def single_layer_similar_heatmap_visual(output_t0, output_t1, save_change_map_dir,
                                        epoch, filename, layer_flag, dist_flag):
    interp = nn.Upsample(size=[cfg.TRANSFROM_SCALES[1], cfg.TRANSFROM_SCALES[0]], mode='bilinear')
    n, c, h, w = output_t0.data.shape
    out_t0_rz = torch.transpose(output_t0.view(c, h * w), 1, 0)
    out_t1_rz = torch.transpose(output_t1.view(c, h * w), 1, 0)
    distance = various_distance(out_t0_rz, out_t1_rz, dist_flag=dist_flag)
    similar_distance_map = distance.view(h, w).data.cpu().numpy()
    similar_distance_map_rz = interp(Variable(torch.from_numpy(similar_distance_map[np.newaxis, np.newaxis, :])))
    similar_dis_map_colorize = cv2.applyColorMap(np.uint8(255 * similar_distance_map_rz.data.cpu().numpy()[0][0]),
                                                 cv2.COLORMAP_JET)
    save_change_map_dir_ = os.path.join(save_change_map_dir, 'epoch_' + str(epoch))
    check_dir(save_change_map_dir_)
    save_change_map_dir_layer = os.path.join(save_change_map_dir_, layer_flag)
    check_dir(save_change_map_dir_layer)
    save_weight_fig_dir = os.path.join(save_change_map_dir_layer, filename + '.jpg')
    cv2.imwrite(save_weight_fig_dir, similar_dis_map_colorize)
    return similar_distance_map_rz.data.cpu().numpy()
Example #26
Source File: utils.py From SceneChangeDet with MIT License (5 votes)

def attention_weights_visulize(weights_dict, original_img, save_base_path):
    for idx, loc_attention_weight_vec in weights_dict.iteritems():
        height, width, channel = original_img.shape
        alpha_att_map = cv2.resize(loc_attention_weight_vec, (width, height), interpolation=cv2.INTER_LINEAR)
        alpha_att_map_ = cv2.applyColorMap(np.uint8(255 * alpha_att_map), cv2.COLORMAP_JET)
        fuse_heat_map = 0.6 * alpha_att_map_ + 0.4 * original_img
        cv2.imwrite(save_base_path + '_' + str(idx) + '.jpg', fuse_heat_map)
        # print idx
Example #27
Source File: utils.py From PlaneNet with MIT License (5 votes)

def drawDepthImage(depth):
    depthImage = np.clip(depth / 5 * 255, 0, 255).astype(np.uint8)
    depthImage = cv2.applyColorMap(255 - depthImage, colormap=cv2.COLORMAP_JET)
    return depthImage

## Math operations
Example #28
Source File: utils.py From visual-pushing-grasping with BSD 2-Clause "Simplified" License (5 votes)

def get_affordance_vis(grasp_affordances, input_images, num_rotations, best_pix_ind):
    vis = None
    for vis_row in range(num_rotations/4):
        tmp_row_vis = None
        for vis_col in range(4):
            rotate_idx = vis_row*4+vis_col
            affordance_vis = grasp_affordances[rotate_idx,:,:]
            affordance_vis[affordance_vis < 0] = 0  # assume probability
            # affordance_vis = np.divide(affordance_vis, np.max(affordance_vis))
            affordance_vis[affordance_vis > 1] = 1  # assume probability
            affordance_vis.shape = (grasp_affordances.shape[1], grasp_affordances.shape[2])
            affordance_vis = cv2.applyColorMap((affordance_vis*255).astype(np.uint8), cv2.COLORMAP_JET)
            input_image_vis = (input_images[rotate_idx,:,:,:]*255).astype(np.uint8)
            input_image_vis = cv2.resize(input_image_vis, (0,0), fx=0.5, fy=0.5, interpolation=cv2.INTER_NEAREST)
            affordance_vis = (0.5*cv2.cvtColor(input_image_vis, cv2.COLOR_RGB2BGR) + 0.5*affordance_vis).astype(np.uint8)
            if rotate_idx == best_pix_ind[0]:
                affordance_vis = cv2.circle(affordance_vis, (int(best_pix_ind[2]), int(best_pix_ind[1])), 7, (0,0,255), 2)
            if tmp_row_vis is None:
                tmp_row_vis = affordance_vis
            else:
                tmp_row_vis = np.concatenate((tmp_row_vis, affordance_vis), axis=1)
        if vis is None:
            vis = tmp_row_vis
        else:
            vis = np.concatenate((vis, tmp_row_vis), axis=0)
    return vis
Example #29
Source File: trainer.py From visual-pushing-grasping with BSD 2-Clause "Simplified" License (5 votes)

def get_prediction_vis(self, predictions, color_heightmap, best_pix_ind):
    canvas = None
    num_rotations = predictions.shape[0]
    for canvas_row in range(int(num_rotations/4)):
        tmp_row_canvas = None
        for canvas_col in range(4):
            rotate_idx = canvas_row*4+canvas_col
            prediction_vis = predictions[rotate_idx,:,:].copy()
            # prediction_vis[prediction_vis < 0] = 0  # assume probability
            # prediction_vis[prediction_vis > 1] = 1  # assume probability
            prediction_vis = np.clip(prediction_vis, 0, 1)
            prediction_vis.shape = (predictions.shape[1], predictions.shape[2])
            prediction_vis = cv2.applyColorMap((prediction_vis*255).astype(np.uint8), cv2.COLORMAP_JET)
            if rotate_idx == best_pix_ind[0]:
                prediction_vis = cv2.circle(prediction_vis, (int(best_pix_ind[2]), int(best_pix_ind[1])), 7, (0,0,255), 2)
            prediction_vis = ndimage.rotate(prediction_vis, rotate_idx*(360.0/num_rotations), reshape=False, order=0)
            background_image = ndimage.rotate(color_heightmap, rotate_idx*(360.0/num_rotations), reshape=False, order=0)
            prediction_vis = (0.5*cv2.cvtColor(background_image, cv2.COLOR_RGB2BGR) + 0.5*prediction_vis).astype(np.uint8)
            if tmp_row_canvas is None:
                tmp_row_canvas = prediction_vis
            else:
                tmp_row_canvas = np.concatenate((tmp_row_canvas, prediction_vis), axis=1)
        if canvas is None:
            canvas = tmp_row_canvas
        else:
            canvas = np.concatenate((canvas, tmp_row_canvas), axis=0)
    return canvas
Example #30
Source File: CThermal.py From Thermal_Image_Analysis with MIT License (5 votes)

def get_temp_image(thermal_np, colormap=cv.COLORMAP_JET):
    thermal_np_norm = CFlir.normalize(thermal_np)
    thermal_image = np.array(thermal_np_norm*255, dtype=np.uint8)
    if colormap != None:
        thermal_image = cv.applyColorMap(thermal_image, colormap)
    return thermal_image