Java Code Examples for org.opencv.core.Mat#convertTo()
The following examples show how to use org.opencv.core.Mat#convertTo(). Each example is taken from an open source project; follow the link above it to view the original source file in context.
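Before the examples, a minimal sketch of the method itself may help: convertTo(Mat m, int rtype, double alpha, double beta) writes a converted copy of the matrix into m, computing m(x,y) = saturate_cast&lt;rtype&gt;(alpha * src(x,y) + beta). A negative rtype keeps the source depth, and alpha and beta default to 1 and 0 in the shorter overloads. The file names below are placeholders for illustration, not taken from any of the projects listed.

// Round-trip sketch: 8-bit grayscale -> float in [0, 1] -> back to 8-bit.
// "input.jpg" and "output.jpg" are hypothetical paths.
Mat src = Imgcodecs.imread("input.jpg", Imgcodecs.IMREAD_GRAYSCALE);
Mat f = new Mat();
src.convertTo(f, CvType.CV_32F, 1.0 / 255.0, 0.0); // scale 0..255 into 0..1
// ... float-precision processing goes here ...
Mat out = new Mat();
f.convertTo(out, CvType.CV_8U, 255.0, 0.0); // rescale; values saturate to 0..255
Imgcodecs.imwrite("output.jpg", out);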
Example 1
Source File: FusionEnhance.java From OptimizedImageEnhance with MIT License
public static Mat enhance(Mat image, int level) {
    // color balance
    Mat img1 = Filters.SimplestColorBalance(image, 5);
    img1.convertTo(img1, CvType.CV_8UC1);
    // Perform sRGB to CIE Lab color space conversion
    Mat LabIm1 = new Mat();
    Imgproc.cvtColor(img1, LabIm1, Imgproc.COLOR_BGR2Lab);
    Mat L1 = new Mat();
    Core.extractChannel(LabIm1, L1, 0);
    // apply CLAHE
    Mat[] result = applyCLAHE(LabIm1, L1);
    Mat img2 = result[0];
    Mat L2 = result[1];
    // calculate normalized weight
    Mat w1 = calWeight(img1, L1);
    Mat w2 = calWeight(img2, L2);
    Mat sumW = new Mat();
    Core.add(w1, w2, sumW);
    Core.divide(w1, sumW, w1);
    Core.divide(w2, sumW, w2);
    // merge image1 and image2
    return ImgDecompose.fuseTwoImage(w1, img1, w2, img2, level);
}
Example 2
Source File: EnhanceFunc.java From ImageEnhanceViaFusion with MIT License
private static Mat calWeight(Mat img, Mat L) {
    Core.divide(L, new Scalar(255.0), L);
    L.convertTo(L, CvType.CV_32F);
    // calculate the Laplacian contrast weight
    Mat WL = WeightCalculate.LaplacianContrast(L);
    WL.convertTo(WL, L.type());
    // calculate the local contrast weight
    Mat WC = WeightCalculate.LocalContrast(L);
    WC.convertTo(WC, L.type());
    // calculate the saliency weight
    Mat WS = WeightCalculate.Saliency(img);
    WS.convertTo(WS, L.type());
    // calculate the exposedness weight
    Mat WE = WeightCalculate.Exposedness(L);
    WE.convertTo(WE, L.type());
    // sum the weights
    Mat weight = WL.clone();
    Core.add(weight, WC, weight);
    Core.add(weight, WS, weight);
    Core.add(weight, WE, weight);
    return weight;
}
Example 3
Source File: Eigenfaces.java From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
public String recognize(Mat img, String expectedLabel) {
    // Ignore
    img = img.reshape(1, 1);
    // Subtract mean
    img.convertTo(img, CvType.CV_32F);
    Core.subtract(img, Psi, img);
    // Project to subspace
    Mat projected = getFeatureVector(img);
    // Save all points of image for tSNE
    img.convertTo(img, CvType.CV_8U);
    addImage(projected, expectedLabel, true);
    //addImage(projected, expectedLabel);
    Mat distance = new Mat(Omega.rows(), 1, CvType.CV_64FC1);
    for (int i = 0; i < Omega.rows(); i++) {
        double dist = Core.norm(projected.row(0), Omega.row(i), Core.NORM_L2);
        distance.put(i, 0, dist);
    }
    Mat sortedDist = new Mat(Omega.rows(), 1, CvType.CV_8UC1);
    Core.sortIdx(distance, sortedDist, Core.SORT_EVERY_COLUMN + Core.SORT_ASCENDING);
    // Give back the name of the found person
    int index = (int) (sortedDist.get(0, 0)[0]);
    return labelMap.getKey(labelList.get(index));
}
Example 4
Source File: FeatureWeight.java From OptimizedImageEnhance with MIT License
public static Mat LuminanceWeight(Mat img, Mat L) {
    Mat bCnl = new Mat();
    Core.extractChannel(img, bCnl, 0);
    bCnl.convertTo(bCnl, CvType.CV_32F);
    Mat gCnl = new Mat();
    Core.extractChannel(img, gCnl, 1);
    gCnl.convertTo(gCnl, CvType.CV_32F);
    Mat rCnl = new Mat();
    Core.extractChannel(img, rCnl, 2);
    rCnl.convertTo(rCnl, CvType.CV_32F);
    Mat lum = new Mat(L.rows(), L.cols(), L.type());
    for (int i = 0; i < L.rows(); i++) {
        for (int j = 0; j < L.cols(); j++) {
            double data = Math.sqrt((Math.pow(bCnl.get(i, j)[0] / 255.0 - L.get(i, j)[0], 2.0)
                    + Math.pow(gCnl.get(i, j)[0] / 255.0 - L.get(i, j)[0], 2.0)
                    + Math.pow(rCnl.get(i, j)[0] / 255.0 - L.get(i, j)[0], 2.0)) / 3);
            lum.put(i, j, data);
        }
    }
    return lum;
}
Example 5
Source File: KNearestNeighbor.java From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
@Override
public void loadFromFile() {
    MatName mtrainingList = new MatName("TrainingList", trainingList);
    List<MatName> listMat = new ArrayList<MatName>();
    listMat.add(mtrainingList);
    labelList = fh.loadIntegerList(fh.createLabelFile(fh.KNN_PATH, "train"));
    labelMap = fh.getLabelMapFromFile(fh.KNN_PATH);
    trainingList = fh.getMatListFromXml(listMat, fh.KNN_PATH, trainingFile).get(0).getMat();
    labels = new Mat(labelList.size(), 1, CvType.CV_8UC1);
    for (int i = 0; i < labelList.size(); i++) {
        Integer label = labelList.get(i);
        // Fill shorter labels with 0s
        labels.put(i, 0, label);
    }
    labels.convertTo(labels, CvType.CV_32F);
    PreferencesHelper preferencesHelper = new PreferencesHelper(context);
    k = preferencesHelper.getK();
    knn = KNearest.create();
    knn.setIsClassifier(true);
    knn.train(trainingList, 0, labels);
}
Example 6
Source File: Filters.java From ImageEnhanceViaFusion with MIT License
private static Mat convertTo(Mat mat, int depth) {
    if (mat.depth() == depth) {
        return mat;
    }
    Mat result = new Mat();
    mat.convertTo(result, depth);
    return result;
}
Example 7
Source File: Tld.java From OpenTLDAndroid with Apache License 2.0
/**
 * Output: resized zero-mean patch/pattern.
 *
 * @param inImg INPUT, outPattern OUTPUT
 * @return stdev
 */
private static double resizeZeroMeanStdev(final Mat inImg, Mat outPattern, int patternSize) {
    if (inImg == null || outPattern == null) {
        return -1;
    }
    Imgproc.resize(inImg, outPattern, new Size(patternSize, patternSize));
    final MatOfDouble mean = new MatOfDouble();
    final MatOfDouble stdev = new MatOfDouble();
    Core.meanStdDev(outPattern, mean, stdev);
    outPattern.convertTo(outPattern, CvType.CV_32F);
    Core.subtract(outPattern, new Scalar(mean.toArray()[0]), outPattern);
    return stdev.toArray()[0];
}
Example 8
Source File: GuidedFilterFeatheringExample.java From OptimizedImageEnhance with MIT License
public static void main(String[] args) {
    String imgPath = "src/main/resources/dcp_images/feathering/toy.bmp";
    String guidedImgPath = "src/main/resources/dcp_images/feathering/toy-mask.bmp";
    Mat image = Imgcodecs.imread(imgPath, Imgcodecs.CV_LOAD_IMAGE_COLOR); // load image
    new ImShow("image").showImage(image);
    image.convertTo(image, CvType.CV_32F);
    Mat guide = Imgcodecs.imread(guidedImgPath, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    guide.convertTo(guide, CvType.CV_32F);
    int r = 60;
    double eps = 0.000001;
    Mat q = Filters.GuidedImageFilter_Color(image, guide, r, eps, 1, -1);
    q.convertTo(q, CvType.CV_8UC1);
    new ImShow("q").showImage(q);
}
Example 9
Source File: GuidedFilterSmoothingExample.java From OptimizedImageEnhance with MIT License
public static void main(String[] args) {
    String imgPath = "src/main/resources/dcp_images/smoothing/cat.bmp";
    Mat image = Imgcodecs.imread(imgPath, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    new ImShow("image").showImage(image);
    image.convertTo(image, CvType.CV_32F);
    Mat guide = image.clone();
    int r = 4; // try r = 2, 4, or 8
    double eps = 0.16; // try eps = 0.01, 0.04, 0.16
    Mat q = Filters.GuidedImageFilter(image, guide, r, eps);
    q.convertTo(q, CvType.CV_8UC1);
    new ImShow("q").showImage(q);
}
Example 10
Source File: GammaCorrection.java From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
        img.convertTo(img, CvType.CV_32F);
        Core.divide(img, INT_MAX, img);
        Core.pow(img, gamma, img);
        Core.multiply(img, INT_MAX, img);
        img.convertTo(img, CvType.CV_8U);
        processed.add(img);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}
Example 11
Source File: Recognition.java From classchecks with Apache License 2.0
public static Mat subspaceReconstruct(Mat W, Mat mean, Mat src) {
    int n = src.rows();
    int d = src.cols();
    Mat X = new Mat();
    Mat Y = new Mat();
    src.convertTo(Y, W.type());
    Core.gemm(Y, W, 1.0, new Mat(), 0.0, X, 2);
    if (!mean.empty()) {
        for (int i = 0; i < n; i++) {
            Mat r_i = X.row(i);
            Core.add(r_i, mean.reshape(1, 1), r_i);
        }
    }
    return X;
}
Example 12
Source File: FeatureWeight.java From OptimizedImageEnhance with MIT License
public static Mat Saliency(Mat img) {
    // blur image with a 3x3 or 5x5 Gaussian filter
    Mat gfbgr = new Mat();
    Imgproc.GaussianBlur(img, gfbgr, new Size(3, 3), 3);
    // Perform sRGB to CIE Lab color space conversion
    Mat LabIm = new Mat();
    Imgproc.cvtColor(gfbgr, LabIm, Imgproc.COLOR_BGR2Lab);
    // Compute Lab average values (note that in the paper this average is found from the
    // un-blurred original image, but the results are quite similar)
    List<Mat> lab = new ArrayList<>();
    Core.split(LabIm, lab);
    Mat l = lab.get(0);
    l.convertTo(l, CvType.CV_32F);
    Mat a = lab.get(1);
    a.convertTo(a, CvType.CV_32F);
    Mat b = lab.get(2);
    b.convertTo(b, CvType.CV_32F);
    double lm = Core.mean(l).val[0];
    double am = Core.mean(a).val[0];
    double bm = Core.mean(b).val[0];
    // Finally compute the saliency map
    Mat sm = Mat.zeros(l.rows(), l.cols(), l.type());
    Core.subtract(l, new Scalar(lm), l);
    Core.subtract(a, new Scalar(am), a);
    Core.subtract(b, new Scalar(bm), b);
    Core.add(sm, l.mul(l), sm);
    Core.add(sm, a.mul(a), sm);
    Core.add(sm, b.mul(b), sm);
    return sm;
}
Example 13
Source File: ImgprocessUtils.java From classchecks with Apache License 2.0
/**
 * @Title: gammaAdjust
 * @Description: gamma correction
 * @param grayImg the grayscale input image
 * @return Mat
 */
public static Mat gammaAdjust(Mat grayImg) {
    Mat X = new Mat();
    grayImg.convertTo(X, CvType.CV_32FC1);
    Mat I = new Mat();
    float gamma = 1 / 2.2f;
    Core.pow(X, gamma, I);
    Mat result = norm_0_255(I);
    return result;
}
Example 14
Source File: OpenCVNonMavenExamples.java From Java-for-Data-Science with MIT License
public void enhanceImageBrightness() {
    double alpha = 1; // change to 2 for more brightness
    double beta = 50;
    String fileName = "cat.jpg";
    Mat source = Imgcodecs.imread(fileName);
    Mat destination = new Mat(source.rows(), source.cols(), source.type());
    source.convertTo(destination, -1, alpha, beta);
    Imgcodecs.imwrite("brighterCat.jpg", destination);
}
Example 15
Source File: DctConverter.java From BlindWatermark with Apache License 2.0
@Override
public void addImageWatermark(Mat com, Mat watermark) {
    Mat mask = new Mat();
    inRange(watermark, new Scalar(0, 0, 0, 0), new Scalar(0, 0, 0, 0), mask);
    Mat i2 = new Mat(watermark.size(), watermark.type(), new Scalar(2, 2, 2, 0));
    i2.copyTo(watermark, mask);
    watermark.convertTo(watermark, CV_32F);
    int row = (com.rows() - watermark.rows()) >> 1;
    int col = (com.cols() - watermark.cols()) >> 1;
    copyMakeBorder(watermark, watermark, row, row, col, col, BORDER_CONSTANT, Scalar.all(0));
    Utils.fixSize(watermark, com);
    addWeighted(watermark, 0.03, com, 1, 0.0, com);
}
Example 16
Source File: WeightCalculate.java From ImageEnhanceViaFusion with MIT License
public static Mat Saliency(Mat img) {
    // blur image with a 3x3 or 5x5 Gaussian filter
    Mat gfbgr = new Mat();
    Imgproc.GaussianBlur(img, gfbgr, new Size(3, 3), 3);
    // Perform sRGB to CIE Lab color space conversion
    Mat LabIm = new Mat();
    Imgproc.cvtColor(gfbgr, LabIm, Imgproc.COLOR_BGR2Lab);
    // Compute Lab average values (note that in the paper this average is found from the
    // un-blurred original image, but the results are quite similar)
    List<Mat> lab = new ArrayList<Mat>();
    Core.split(LabIm, lab);
    Mat l = lab.get(0);
    l.convertTo(l, CvType.CV_32F);
    Mat a = lab.get(1);
    a.convertTo(a, CvType.CV_32F);
    Mat b = lab.get(2);
    b.convertTo(b, CvType.CV_32F);
    double lm = Core.mean(l).val[0];
    double am = Core.mean(a).val[0];
    double bm = Core.mean(b).val[0];
    // Finally compute the saliency map
    Mat sm = Mat.zeros(l.rows(), l.cols(), l.type());
    Core.subtract(l, new Scalar(lm), l);
    Core.subtract(a, new Scalar(am), a);
    Core.subtract(b, new Scalar(bm), b);
    Core.add(sm, l.mul(l), sm);
    Core.add(sm, a.mul(a), sm);
    Core.add(sm, b.mul(b), sm);
    return sm;
}
Example 17
Source File: DftConverter.java From BlindWatermark with Apache License 2.0
@Override
public Mat showWatermark(Mat src) {
    List<Mat> newPlanes = new ArrayList<>(2);
    Mat mag = new Mat();
    split(src, newPlanes);
    magnitude(newPlanes.get(0), newPlanes.get(1), mag);
    add(Mat.ones(mag.size(), CV_32F), mag, mag);
    log(mag, mag);
    mag.convertTo(mag, CV_8UC1);
    normalize(mag, mag, 0, 255, NORM_MINMAX, CV_8UC1);
    return mag;
}
Example 18
Source File: DftConverter.java From BlindWatermark with Apache License 2.0
@Override
public Mat start(Mat src) {
    src.convertTo(src, CV_32F);
    List<Mat> planes = new ArrayList<>(2);
    Mat com = new Mat();
    planes.add(0, src);
    planes.add(1, Mat.zeros(src.size(), CV_32F));
    merge(planes, com);
    dft(com, com);
    return com;
}
Example 19
Source File: EnhanceFunc.java From ImageEnhanceViaFusion with MIT License
public static void main(String[] args) {
    String imgPath = "images/5.jpg";
    Mat image = Imgcodecs.imread(imgPath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
    new ImShow("original").showImage(image);
    // color balance
    Mat img1 = ColorBalance.SimplestColorBalance(image, 5);
    img1.convertTo(img1, CvType.CV_8UC1);
    // Perform sRGB to CIE Lab color space conversion
    Mat LabIm1 = new Mat();
    Imgproc.cvtColor(img1, LabIm1, Imgproc.COLOR_BGR2Lab);
    Mat L1 = new Mat();
    Core.extractChannel(LabIm1, L1, 0);
    // apply CLAHE
    Mat[] result = applyCLAHE(LabIm1, L1);
    Mat img2 = result[0];
    Mat L2 = result[1];
    // calculate normalized weight
    Mat w1 = calWeight(img1, L1);
    Mat w2 = calWeight(img2, L2);
    Mat sumW = new Mat();
    Core.add(w1, w2, sumW);
    Core.divide(w1, sumW, w1);
    Core.divide(w2, sumW, w2);
    // construct the Gaussian pyramid for the weights
    int level = 5;
    Mat[] weight1 = Pyramid.GaussianPyramid(w1, level);
    Mat[] weight2 = Pyramid.GaussianPyramid(w2, level);
    // construct the Laplacian pyramid for each input image channel
    img1.convertTo(img1, CvType.CV_32F);
    img2.convertTo(img2, CvType.CV_32F);
    List<Mat> bgr = new ArrayList<Mat>();
    Core.split(img1, bgr);
    Mat[] bCnl1 = Pyramid.LaplacianPyramid(bgr.get(0), level);
    Mat[] gCnl1 = Pyramid.LaplacianPyramid(bgr.get(1), level);
    Mat[] rCnl1 = Pyramid.LaplacianPyramid(bgr.get(2), level);
    bgr.clear();
    Core.split(img2, bgr);
    Mat[] bCnl2 = Pyramid.LaplacianPyramid(bgr.get(0), level);
    Mat[] gCnl2 = Pyramid.LaplacianPyramid(bgr.get(1), level);
    Mat[] rCnl2 = Pyramid.LaplacianPyramid(bgr.get(2), level);
    // fusion process
    Mat[] bCnl = new Mat[level];
    Mat[] gCnl = new Mat[level];
    Mat[] rCnl = new Mat[level];
    for (int i = 0; i < level; i++) {
        Mat cn = new Mat();
        Core.add(bCnl1[i].mul(weight1[i]), bCnl2[i].mul(weight2[i]), cn);
        bCnl[i] = cn.clone();
        Core.add(gCnl1[i].mul(weight1[i]), gCnl2[i].mul(weight2[i]), cn);
        gCnl[i] = cn.clone();
        Core.add(rCnl1[i].mul(weight1[i]), rCnl2[i].mul(weight2[i]), cn);
        rCnl[i] = cn.clone();
    }
    // reconstruct & output
    Mat bChannel = Pyramid.PyramidReconstruct(bCnl);
    Mat gChannel = Pyramid.PyramidReconstruct(gCnl);
    Mat rChannel = Pyramid.PyramidReconstruct(rCnl);
    Mat fusion = new Mat();
    Core.merge(new ArrayList<Mat>(Arrays.asList(bChannel, gChannel, rChannel)), fusion);
    fusion.convertTo(fusion, CvType.CV_8UC1);
    new ImShow("fusion").showImage(fusion);
}
Example 20
Source File: ALTMRetinex.java From OptimizedImageEnhance with MIT License
public static Mat enhance(Mat image, int r, double eps, double eta, double lambda, double krnlRatio) {
    image.convertTo(image, CvType.CV_32F);
    // extract each color channel
    List<Mat> bgr = new ArrayList<>();
    Core.split(image, bgr);
    Mat bChannel = bgr.get(0);
    Mat gChannel = bgr.get(1);
    Mat rChannel = bgr.get(2);
    int m = rChannel.rows();
    int n = rChannel.cols();
    // Global Adaptation
    List<Mat> list = globalAdaptation(bChannel, gChannel, rChannel, m, n);
    Mat Lw = list.get(0);
    Mat Lg = list.get(1);
    // Local Adaptation
    Mat Hg = localAdaptation(Lg, m, n, r, eps, krnlRatio);
    Lg.convertTo(Lg, CvType.CV_32F);
    // process
    Mat alpha = new Mat(m, n, rChannel.type());
    Core.divide(Lg, new Scalar(Core.minMaxLoc(Lg).maxVal / eta), alpha);
    //Core.multiply(alpha, new Scalar(eta), alpha);
    Core.add(alpha, new Scalar(1.0), alpha);
    //alpha = adjustment(alpha, 1.25);
    Mat Lg_ = new Mat(m, n, rChannel.type());
    Core.add(Lg, new Scalar(1.0 / 255.0), Lg_);
    Core.log(Lg_, Lg_);
    double beta = Math.exp(Core.sumElems(Lg_).val[0] / (m * n)) * lambda;
    Mat Lout = new Mat(m, n, rChannel.type());
    Core.divide(Lg, Hg, Lout);
    Core.add(Lout, new Scalar(beta), Lout);
    Core.log(Lout, Lout);
    Core.normalize(alpha.mul(Lout), Lout, 0, 255, Core.NORM_MINMAX);
    Mat gain = obtainGain(Lout, Lw, m, n);
    // output
    Core.divide(rChannel.mul(gain), new Scalar(Core.minMaxLoc(rChannel).maxVal / 255.0), rChannel); // red channel
    Core.divide(gChannel.mul(gain), new Scalar(Core.minMaxLoc(gChannel).maxVal / 255.0), gChannel); // green channel
    Core.divide(bChannel.mul(gain), new Scalar(Core.minMaxLoc(bChannel).maxVal / 255.0), bChannel); // blue channel
    // merge the three color channels into one image
    Mat outval = new Mat();
    Core.merge(new ArrayList<>(Arrays.asList(bChannel, gChannel, rChannel)), outval);
    outval.convertTo(outval, CvType.CV_8UC1);
    return outval;
}