Java Code Examples for org.opencv.core.Core#mean()
The following examples show how to use org.opencv.core.Core#mean().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: FeatureWeight.java From OptimizedImageEnhance with MIT License | 5 votes |
public static Mat Saliency(Mat img) { // blur image with a 3x3 or 5x5 Gaussian filter Mat gfbgr = new Mat(); Imgproc.GaussianBlur(img, gfbgr, new Size(3, 3), 3); // Perform sRGB to CIE Lab color space conversion Mat LabIm = new Mat(); Imgproc.cvtColor(gfbgr, LabIm, Imgproc.COLOR_BGR2Lab); // Compute Lab average values (note that in the paper this average is found from the // un-blurred original image, but the results are quite similar) List<Mat> lab = new ArrayList<>(); Core.split(LabIm, lab); Mat l = lab.get(0); l.convertTo(l, CvType.CV_32F); Mat a = lab.get(1); a.convertTo(a, CvType.CV_32F); Mat b = lab.get(2); b.convertTo(b, CvType.CV_32F); double lm = Core.mean(l).val[0]; double am = Core.mean(a).val[0]; double bm = Core.mean(b).val[0]; // Finally compute the saliency map Mat sm = Mat.zeros(l.rows(), l.cols(), l.type()); Core.subtract(l, new Scalar(lm), l); Core.subtract(a, new Scalar(am), a); Core.subtract(b, new Scalar(bm), b); Core.add(sm, l.mul(l), sm); Core.add(sm, a.mul(a), sm); Core.add(sm, b.mul(b), sm); return sm; }
Example 2
Source File: RemoveBackScatter.java From OptimizedImageEnhance with MIT License | 5 votes |
private static Mat dehazeProcess(Mat img, Mat trans, double[] airlight) { Mat balancedImg = Filters.SimplestColorBalance(img, 5); Mat bCnl = new Mat(); Core.extractChannel(balancedImg, bCnl, 0); Mat gCnl = new Mat(); Core.extractChannel(balancedImg, gCnl, 1); Mat rCnl = new Mat(); Core.extractChannel(balancedImg, rCnl, 2); // get mean value double bMean = Core.mean(bCnl).val[0]; double gMean = Core.mean(gCnl).val[0]; double rMean = Core.mean(rCnl).val[0]; // get transmission map for each channel Mat Tb = trans.clone(); Core.multiply(Tb, new Scalar(Math.max(bMean, Math.max(gMean, rMean)) / bMean * 0.8), Tb); Mat Tg = trans.clone(); Core.multiply(Tg, new Scalar(Math.max(bMean, Math.max(gMean, rMean)) / gMean * 0.9), Tg); Mat Tr = trans.clone(); Core.multiply(Tr, new Scalar(Math.max(bMean, Math.max(gMean, rMean)) / rMean * 0.8), Tr); // dehaze by formula // blue channel Mat bChannel = new Mat(); Core.subtract(bCnl, new Scalar(airlight[0]), bChannel); Core.divide(bChannel, Tb, bChannel); Core.add(bChannel, new Scalar(airlight[0]), bChannel); // green channel Mat gChannel = new Mat(); Core.subtract(gCnl, new Scalar(airlight[1]), gChannel); Core.divide(gChannel, Tg, gChannel); Core.add(gChannel, new Scalar(airlight[1]), gChannel); // red channel Mat rChannel = new Mat(); Core.subtract(rCnl, new Scalar(airlight[2]), rChannel); Core.divide(rChannel, Tr, rChannel); Core.add(rChannel, new Scalar(airlight[2]), rChannel); Mat dehazed = new Mat(); Core.merge(new ArrayList<>(Arrays.asList(bChannel, gChannel, rChannel)), dehazed); return dehazed; }
Example 3
Source File: WeightCalculate.java From ImageEnhanceViaFusion with MIT License | 5 votes |
public static Mat Saliency(Mat img) { // blur image with a 3x3 or 5x5 Gaussian filter Mat gfbgr = new Mat(); Imgproc.GaussianBlur(img, gfbgr, new Size(3, 3), 3); // Perform sRGB to CIE Lab color space conversion Mat LabIm = new Mat(); Imgproc.cvtColor(gfbgr, LabIm, Imgproc.COLOR_BGR2Lab); // Compute Lab average values (note that in the paper this average is found from the // un-blurred original image, but the results are quite similar) List<Mat> lab = new ArrayList<Mat>(); Core.split(LabIm, lab); Mat l = lab.get(0); l.convertTo(l, CvType.CV_32F); Mat a = lab.get(1); a.convertTo(a, CvType.CV_32F); Mat b = lab.get(2); b.convertTo(b, CvType.CV_32F); double lm = Core.mean(l).val[0]; double am = Core.mean(a).val[0]; double bm = Core.mean(b).val[0]; // Finally compute the saliency map Mat sm = Mat.zeros(l.rows(), l.cols(), l.type()); Core.subtract(l, new Scalar(lm), l); Core.subtract(a, new Scalar(am), a); Core.subtract(b, new Scalar(bm), b); Core.add(sm, l.mul(l), sm); Core.add(sm, a.mul(a), sm); Core.add(sm, b.mul(b), sm); return sm; }
Example 4
Source File: ImgprocessUtils.java From classchecks with Apache License 2.0 | 5 votes |
/**
 * Uneven-lighting compensation (in place).
 * <p>
 * Approach: 1) take the mean gray level of the source image I; 2) tile the
 * image into blocks of {@code blockSize} and compute each block's mean,
 * giving a block-brightness matrix D; 3) subtract the global mean from D to
 * get the brightness-deviation matrix E; 4) bicubically resize E up to the
 * source size, giving the brightness-distribution map R; 5) the corrected
 * result is I - R, written back into {@code image} as {@code CV_8UC1}.
 *
 * @param image     input image, modified in place; converted to grayscale
 *                  first if it has 3 channels
 * @param blockSize side length in pixels of the averaging blocks
 */
public static void unevenLightCompensate(Mat image, int blockSize) {
    if(image.channels() == 3) {
        // Original code used the bare constant 7, which is Imgproc.COLOR_RGB2GRAY;
        // kept same-valued here for identical behavior. NOTE(review): for an OpenCV
        // BGR image the intended code is likely COLOR_BGR2GRAY (6) — confirm upstream.
        Imgproc.cvtColor(image, image, Imgproc.COLOR_RGB2GRAY);
    }
    // Global mean gray level of the whole image.
    double average = Core.mean(image).val[0];
    Scalar scalar = new Scalar(average);
    // Number of blocks per dimension, rounding up so edge pixels are covered.
    int rowsNew = (int) Math.ceil((double)image.rows() / (double)blockSize);
    int colsNew = (int) Math.ceil((double)image.cols() / (double)blockSize);
    // One float cell per block, holding that block's mean brightness.
    Mat blockImage = Mat.zeros(rowsNew, colsNew, CvType.CV_32FC1);
    for(int i = 0; i < rowsNew; i++) {
        for(int j = 0; j < colsNew; j++) {
            // Clamp the last row/column of blocks to the image boundary.
            int rowmin = i * blockSize;
            int rowmax = (i + 1) * blockSize;
            if(rowmax > image.rows()) rowmax = image.rows();
            int colmin = j * blockSize;
            int colmax = (j + 1) * blockSize;
            if(colmax > image.cols()) colmax = image.cols();
            Range rangeRow = new Range(rowmin, rowmax);
            Range rangeCol = new Range(colmin, colmax);
            Mat imageROI = new Mat(image, rangeRow, rangeCol);
            double temaver = Core.mean(imageROI).val[0];
            blockImage.put(i, j, temaver);
        }
    }
    // Deviation of each block from the global mean.
    Core.subtract(blockImage, scalar, blockImage);
    // Bicubic upsampling of the deviation matrix to full image size.
    Mat blockImage2 = new Mat();
    Imgproc.resize(blockImage, blockImage2, image.size(), 0, 0, Imgproc.INTER_CUBIC);
    // Subtract the brightness distribution in float space, then write back as 8-bit.
    Mat image2 = new Mat();
    image.convertTo(image2, CvType.CV_32FC1);
    Mat dst = new Mat();
    Core.subtract(image2, blockImage2, dst);
    dst.convertTo(image, CvType.CV_8UC1);
}
Example 5
Source File: AutoCalibrationManager.java From ShootOFF with GNU General Public License v3.0 | 4 votes |
// One step of an auto-exposure calibration loop, invoked per camera frame.
// Drives the camera exposure down until the frame's mean brightness falls
// under TARGET_THRESH, then records success or failure. State lives in the
// enclosing object's fields (patternSet, completed, lastSample, origMean,
// tries) — not visible in this chunk.
@Override public void process(Frame frame) {
    // First call: switch the arena to a white background so brightness
    // measurements are taken against a known bright scene, then wait.
    if (!patternSet) {
        calibrationListener.setArenaBackground("white.png");
        patternSet = true;
        lastSample = System.currentTimeMillis();
        return;
    }
    // Throttle: do nothing if finished, or if sampled less than SAMPLE_DELAY ago.
    if (completed || (System.currentTimeMillis() - lastSample) < SAMPLE_DELAY) return;
    // Mean brightness of the raw frame; val[0] is the first channel
    // (presumably gray or blue — TODO confirm frame format).
    final Scalar mean = Core.mean(frame.getOriginalMat());
    // Remember the very first mean as the baseline (origMean == 0 means unset).
    if (origMean == 0) origMean = mean.val[0];
    logger.trace("{} {}", mean.val[0], TARGET_THRESH);
    if (mean.val[0] > TARGET_THRESH) {
        // Still too bright: lower exposure; stop if the camera can't go lower.
        if (!camera.decreaseExposure()) completed = true;
    } else {
        // Reached the target brightness.
        completed = true;
    }
    // Trace-only diagnostics: dump the frame to exposure-<timestamp>.png.
    if (logger.isTraceEnabled()) {
        String filename = String.format("exposure-%d.png", lastSample);
        final File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, frame.getOriginalMat());
    }
    // Give up after NUM_TRIES adjustment attempts.
    tries++;
    if (tries == NUM_TRIES) completed = true;
    if (completed) {
        // Success requires a meaningful drop from the baseline (> 5%) without
        // overshooting far below the threshold (< 60% of it); otherwise revert.
        if (mean.val[0] > origMean * .95 || mean.val[0] < .6 * TARGET_THRESH) {
            camera.resetExposure();
            logger.info("Failed to adjust exposure, mean originally {} lowest {}", origMean, mean.val[0]);
        } else {
            logger.info("Exposure lowered to {} mean from {}", mean.val[0], origMean);
        }
    }
    lastSample = System.currentTimeMillis();
}