Python cv2.COLOR_RGB2Lab() Examples
The following are 5 code examples of cv2.COLOR_RGB2Lab(). You can go to the original project or source file via the links above each example, or check out the other available functions and classes of the cv2 module.
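Before the examples, a minimal sketch of the conversion itself (the image here is a random placeholder): for 8-bit input OpenCV packs L, a and b into 0..255, while the float32 path expects RGB in [0, 1] and returns L in [0, 100].

import cv2
import numpy as np

# Illustrative random RGB image.
rgb = np.random.randint(0, 256, (64, 64, 3), dtype=np.uint8)

lab = cv2.cvtColor(rgb, cv2.COLOR_RGB2Lab)        # uint8 path: L, a, b all packed into 0..255
rgb_back = cv2.cvtColor(lab, cv2.COLOR_Lab2RGB)   # round-trip back to RGB

# float32 path: input expected in [0, 1]; L comes back in [0, 100], a/b roughly in [-127, 127].
lab_f = cv2.cvtColor(rgb.astype(np.float32) / 255.0, cv2.COLOR_RGB2Lab)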
Example #1
Source File: colorize.py, from SRNet-Datagen (Apache License 2.0), 6 votes
def sample_from_data(self, bg_mat):
    bg_orig = bg_mat.copy()
    bg_mat = cv2.cvtColor(bg_mat, cv2.COLOR_RGB2Lab)
    bg_mat = np.reshape(bg_mat, (np.prod(bg_mat.shape[:2]), 3))
    bg_mean = np.mean(bg_mat, axis=0)

    norms = np.linalg.norm(self.colorsLAB - bg_mean[None, :], axis=1)
    # choose a random color amongst the top 3 closest matches:
    #nn = np.random.choice(np.argsort(norms)[:3])
    nn = np.argmin(norms)

    ## nearest neighbour color:
    data_col = self.colorsRGB[np.mod(nn, self.ncol), :]  # color

    col1 = self.sample_normal(data_col[:3], data_col[3:6])
    col2 = self.sample_normal(data_col[6:9], data_col[9:12])

    if nn < self.ncol:
        return (col2, col1)
    else:
        # need to swap to make the second color close to the input background color
        return (col1, col2)
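A standalone sketch of the nearest-neighbour-in-Lab lookup this method performs; the background patch and the palette below are made-up placeholders, not the class's real color data.

import cv2
import numpy as np

bg_rgb = np.zeros((32, 32, 3), dtype=np.uint8)
bg_rgb[:] = (200, 40, 40)                          # assumed reddish background patch
bg_lab = cv2.cvtColor(bg_rgb, cv2.COLOR_RGB2Lab).reshape(-1, 3).mean(axis=0)

palette_rgb = np.array([[255, 0, 0], [0, 255, 0], [0, 0, 255]], dtype=np.uint8)  # assumed palette
palette_lab = np.squeeze(cv2.cvtColor(palette_rgb[None, :, :], cv2.COLOR_RGB2Lab)).astype(np.float32)

nearest = np.argmin(np.linalg.norm(palette_lab - bg_lab[None, :], axis=1))       # index of the closest palette color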
Example #2
Source File: utils_img.py, from pyslam (GNU General Public License v3.0), 5 votes
def proc_clahe(img):
    clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
    lab = cv2.cvtColor(img, cv2.COLOR_RGB2Lab)
    lab[:, :, 0] = clahe.apply(lab[:, :, 0])   # equalize only the lightness channel
    return cv2.cvtColor(lab, cv2.COLOR_Lab2RGB)

# create a scaled image of uint8 from an image of floats
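A hedged usage sketch for proc_clahe; the file name is a placeholder, and since cv2.imread returns BGR the image is converted to RGB first.

import cv2

img_bgr = cv2.imread("photo.jpg")                   # placeholder path, assumed to exist
img_rgb = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2RGB)  # imread yields BGR; proc_clahe expects RGB
img_eq = proc_clahe(img_rgb)                        # RGB image with CLAHE applied to the L channel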
Example #3
Source File: colorize.py, from SRNet-Datagen (Apache License 2.0), 5 votes
def get_color_matrix(col_file):
    with open(col_file, 'rb') as f:
        colorsRGB = cp.load(f, encoding='latin1')
    ncol = colorsRGB.shape[0]
    colorsLAB = np.r_[colorsRGB[:, 0:3], colorsRGB[:, 6:9]].astype(np.uint8)
    colorsLAB = np.squeeze(cv2.cvtColor(colorsLAB[None, :, :], cv2.COLOR_RGB2Lab))
    return colorsRGB, colorsLAB
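A hedged usage sketch, assuming cp refers to the pickle module and that col_file points at a pickled N×12 array of color statistics as used by SRNet-Datagen; the path below is an assumption.

import pickle as cp  # the example's `cp` is assumed to be the pickle module

colorsRGB, colorsLAB = get_color_matrix('data/colors_new.cp')  # assumed path to the pickled color data
print(colorsRGB.shape, colorsLAB.shape)                        # (N, 12) and (2N, 3) if the file follows that layout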
Example #4
Source File: colorization_evaluator.py, from open_model_zoo (Apache License 2.0), 5 votes
def data_preparation(self, input_data):
    input = input_data[0].astype(np.float32)
    img_lab = cv2.cvtColor(input, cv2.COLOR_RGB2Lab)
    img_l = np.copy(img_lab[:, :, 0])
    img_l_rs = np.copy(img_lab[:, :, 0])
    return img_l, img_l_rs
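A minimal standalone sketch of the same preprocessing step, assuming the RGB input has already been scaled to [0, 1] as float32 (the range OpenCV's float path expects); the image is a placeholder.

import cv2
import numpy as np

img = np.random.rand(256, 256, 3).astype(np.float32)   # placeholder RGB in [0, 1]
img_lab = cv2.cvtColor(img, cv2.COLOR_RGB2Lab)
img_l = img_lab[:, :, 0]                                # lightness channel, range [0, 100]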
Example #5
Source File: test_color.py, from imgaug (MIT License), 4 votes
def test_every_colorspace(self):
    def _image_to_channel(image, cspace):
        if cspace == iaa.CSPACE_YCrCb:
            image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2YCR_CB)
            return image_cvt[:, :, 0:0+1]
        elif cspace == iaa.CSPACE_HSV:
            image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
            return image_cvt[:, :, 2:2+1]
        elif cspace == iaa.CSPACE_HLS:
            image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
            return image_cvt[:, :, 1:1+1]
        elif cspace == iaa.CSPACE_Lab:
            if hasattr(cv2, "COLOR_RGB2Lab"):
                image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2Lab)
            else:
                image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2LAB)
            return image_cvt[:, :, 0:0+1]
        elif cspace == iaa.CSPACE_Luv:
            if hasattr(cv2, "COLOR_RGB2Luv"):
                image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2Luv)
            else:
                image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2LUV)
            return image_cvt[:, :, 0:0+1]
        else:
            assert cspace == iaa.CSPACE_YUV
            image_cvt = cv2.cvtColor(image, cv2.COLOR_RGB2YUV)
            return image_cvt[:, :, 0:0+1]

    # Max differences between input image and image after augmentation
    # when no child augmenter is used (for the given example image below).
    # For some colorspaces the conversion to input colorspace isn't
    # perfect.
    # Values were manually checked.
    max_diff_expected = {
        iaa.CSPACE_YCrCb: 1,
        iaa.CSPACE_HSV: 0,
        iaa.CSPACE_HLS: 0,
        iaa.CSPACE_Lab: 2,
        iaa.CSPACE_Luv: 4,
        iaa.CSPACE_YUV: 1
    }

    image = np.arange(6*6*3).astype(np.uint8).reshape((6, 6, 3))
    for cspace in self.valid_colorspaces:
        with self.subTest(colorspace=cspace):
            child = _BatchCapturingDummyAugmenter()
            aug = iaa.WithBrightnessChannels(
                children=child, to_colorspace=cspace)
            image_aug = aug(image=image)

            expected = _image_to_channel(image, cspace)
            diff = np.abs(
                image.astype(np.int32) - image_aug.astype(np.int32))
            assert np.all(diff <= max_diff_expected[cspace])
            assert np.array_equal(child.last_batch.images[0], expected)
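The hasattr checks above guard against OpenCV builds that expose only the all-caps constant names; a minimal standalone version of that guard, shown here as a sketch on a dummy image:

import cv2
import numpy as np

# Prefer the mixed-case name, fall back to the all-caps alias on builds that lack it.
RGB2LAB_CODE = cv2.COLOR_RGB2Lab if hasattr(cv2, "COLOR_RGB2Lab") else cv2.COLOR_RGB2LAB
lab = cv2.cvtColor(np.zeros((4, 4, 3), dtype=np.uint8), RGB2LAB_CODE)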