Java Code Examples for org.opencv.imgproc.Imgproc#erode()
The following examples show how to use
org.opencv.imgproc.Imgproc#erode().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: GeneralUtils.java From super-cloudops with Apache License 2.0 | 6 votes |
/** * 图像腐蚀/膨胀处理 腐蚀和膨胀对处理没有噪声的图像很有利,慎用 */ public static Mat erodeDilateImg(Mat src) { Mat outImage = new Mat(); // size 越小,腐蚀的单位越小,图片越接近原图 Mat structImage = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)); /** * 图像腐蚀 腐蚀说明: 图像的一部分区域与指定的核进行卷积, 求核的最`小`值并赋值给指定区域。 * 腐蚀可以理解为图像中`高亮区域`的'领域缩小'。 意思是高亮部分会被不是高亮部分的像素侵蚀掉,使高亮部分越来越少。 */ Imgproc.erode(src, outImage, structImage, new Point(-1, -1), 2); src = outImage; /** * 膨胀 膨胀说明: 图像的一部分区域与指定的核进行卷积, 求核的最`大`值并赋值给指定区域。 * 膨胀可以理解为图像中`高亮区域`的'领域扩大'。 意思是高亮部分会侵蚀不是高亮的部分,使高亮部分越来越多。 */ Imgproc.dilate(src, outImage, structImage, new Point(-1, -1), 1); src = outImage; return src; }
Example 2
Source File: GeneralUtils.java From super-cloudops with Apache License 2.0 | 6 votes |
/** * 图像腐蚀 * * @param src * @return */ public static Mat erode(Mat src) { Mat outImage = new Mat(); // size 越小,腐蚀的单位越小,图片越接近原图 Mat structImage = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)); /** * 图像腐蚀 腐蚀说明: 图像的一部分区域与指定的核进行卷积, 求核的最`小`值并赋值给指定区域。 * 腐蚀可以理解为图像中`高亮区域`的'领域缩小'。 意思是高亮部分会被不是高亮部分的像素侵蚀掉,使高亮部分越来越少。 */ Imgproc.erode(src, outImage, structImage, new Point(-1, -1), 1); src = outImage; return src; }
Example 3
Source File: MainActivity.java From MOAAP with MIT License | 6 votes |
/**
 * Receives the photo the user picked, runs the image-processing operation selected
 * via {@code ACTION_MODE} on it, and displays both the original and the result.
 *
 * @param requestCode          which request is returning (only SELECT_PHOTO is handled)
 * @param resultCode           RESULT_OK when the user actually picked an image
 * @param imageReturnedIntent  carries the picked image's content Uri
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    //Put it there, just in case:)
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);
    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK && read_external_storage_granted){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    // NOTE(review): imageStream is never closed — leaks the descriptor; verify.
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    // bitmapToMat produces an RGBA Mat (CV_8UC4).
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    src_gray = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC1);
                    switch (ACTION_MODE) {
                        case HomeActivity.GAUSSIAN_BLUR:
                            Imgproc.GaussianBlur(src, src, new Size(9, 9), 0);
                            break;
                        case HomeActivity.MEAN_BLUR:
                            Imgproc.blur(src, src, new Size(9, 9));
                            break;
                        case HomeActivity.MEDIAN_BLUR:
                            Imgproc.medianBlur(src, src, 9);
                            break;
                        case HomeActivity.SHARPEN:
                            // 3x3 sharpening kernel: center 5, cross neighbors -1.
                            Mat kernel = new Mat(3, 3, CvType.CV_16SC1);
                            //int[] values = {0, -1, 0, -1, 5, -1, 0, -1, 0};
                            Log.d("imageType", CvType.typeToString(src.type()) + "");
                            kernel.put(0, 0, 0, -1, 0, -1, 5, -1, 0, -1, 0);
                            Imgproc.filter2D(src, src, src_gray.depth(), kernel);
                            break;
                        case HomeActivity.DILATE:
                            // NOTE(review): src is RGBA (see bitmapToMat above) but COLOR_BGR2GRAY
                            // is used here and below, which swaps the R/B weights — presumably
                            // COLOR_RGBA2GRAY was intended; confirm.
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelDilate = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                            Imgproc.dilate(src_gray, src_gray, kernelDilate);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ERODE:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelErode = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
                            Imgproc.erode(src_gray, src_gray, kernelErode);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ADAPTIVE_THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            // blockSize 3, constant C = 0.
                            Imgproc.adaptiveThreshold(src_gray, src_gray, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 3, 0);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                    }
                    // Convert the processed Mat back to a Bitmap for display.
                    Bitmap processedImage = Bitmap.createBitmap(src.cols(), src.rows(), Bitmap.Config.ARGB_8888);
                    Log.i("imageType", CvType.typeToString(src.type()) + "");
                    Utils.matToBitmap(src, processedImage);
                    ivImage.setImageBitmap(selectedImage);
                    ivImageProcessed.setImageBitmap(processedImage);
                    Log.i("process", "process done");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
Example 4
Source File: OpenCVoperation.java From Human-hair-detection with Apache License 2.0 | 6 votes |
/**
 * Segments skin-colored pixels out of the grab-cut result.
 * Builds an HSV skin mask, cleans it with erode/dilate and a light blur,
 * masks the grab-cut image with it, and writes the result to disk.
 * Non-skin pixels keep the red background set on {@code matrix3_skindetection}.
 */
public void skinSegmentation() {
    matrix3_skindetection = new Mat(matrix2_grabcut.size(), matrix2_grabcut.type());
    matrix3_skindetection.setTo(new Scalar(0, 0, 255));
    // Empirical HSV skin range: hue 0-20, saturation 48-255, value 80-255.
    Scalar skinLower = new Scalar(0, 48, 80);
    Scalar skinUpper = new Scalar(20, 255, 255);
    Mat hsv = new Mat();
    Imgproc.cvtColor(matrix2_grabcut, hsv, Imgproc.COLOR_BGR2HSV);
    Mat mask = new Mat();
    Core.inRange(hsv, skinLower, skinUpper, mask);
    // Morphological open (erode then dilate) with an 11x11 ellipse removes speckle noise,
    // then a small Gaussian blur softens the mask edges.
    Mat ellipse = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(11, 11));
    Imgproc.erode(mask, mask, ellipse);
    Imgproc.dilate(mask, mask, ellipse);
    Imgproc.GaussianBlur(mask, mask, new Size(3, 3), 0);
    // Copy only masked (skin) pixels onto the red canvas.
    Core.bitwise_and(matrix2_grabcut, matrix2_grabcut, matrix3_skindetection, mask);
    Imgcodecs.imwrite(resultDirectory + skinDetectionOutput, matrix3_skindetection);
}
Example 5
Source File: OpenCVoperation.java From Human-hair-detection with Apache License 2.0 | 6 votes |
public void performErosion_Dilution() { erosion_dilutionMatrix = new Mat(this.matrix7_output.size(),this.matrix7_output.type()); int erosion_size=2; //erosion Mat element1 = Imgproc.getStructuringElement(Imgproc.MORPH_ERODE, new Size(2*erosion_size + 1, 2*erosion_size+1)); Imgproc.erode(matrix7_output, erosion_dilutionMatrix, element1); Imgcodecs.imwrite(resultDirectory+erosionOutput,erosion_dilutionMatrix); /* //dilation Mat element2 = Imgproc.getStructuringElement(Imgproc.MORPH_DILATE, new Size(2*erosion_size + 1, 2*erosion_size+1)); Imgproc.dilate(erosion_dilutionMatrix, erosion_dilutionMatrix, element2); Imgcodecs.imwrite(resultDirectory+this.dilationOutput,erosion_dilutionMatrix); */ }
Example 6
Source File: Morphology.java From go-bees with GNU General Public License v3.0 | 6 votes |
@Override public Mat process(@NonNull Mat frame) { if (frame.empty()) { Log.e("Invalid input frame."); return null; } Mat tmp = frame.clone(); // Step 1: erode to remove legs Imgproc.erode(tmp, tmp, KERNEL3); // Step 2: dilate to join bodies and heads Imgproc.dilate(tmp, tmp, KERNEL2); for (int i = 0; i < REPETITIONS_DILATE; i++) { Imgproc.dilate(tmp, tmp, kernelDilate); } // Step 3: erode to recover original size Imgproc.erode(tmp, tmp, KERNEL1); for (int i = 0; i < REPETITIONS_ERODE; i++) { Imgproc.erode(tmp, tmp, kernelErode); } return tmp; }
Example 7
Source File: MainActivity.java From Android_OCV_Movement_Detection with MIT License | 6 votes |
public Mat onCameraFrame(CvCameraViewFrame inputFrame) { contours.clear(); //gray frame because it requires less resource to process mGray = inputFrame.gray(); //this function converts the gray frame into the correct RGB format for the BackgroundSubtractorMOG apply function Imgproc.cvtColor(mGray, mRgb, Imgproc.COLOR_GRAY2RGB); //apply detects objects moving and produces a foreground mask //the lRate updates dynamically dependent upon seekbar changes sub.apply(mRgb, mFGMask, lRate); //erode and dilate are used to remove noise from the foreground mask Imgproc.erode(mFGMask, mFGMask, new Mat()); Imgproc.dilate(mFGMask, mFGMask, new Mat()); //drawing contours around the objects by first called findContours and then calling drawContours //RETR_EXTERNAL retrieves only external contours //CHAIN_APPROX_NONE detects all pixels for each contour Imgproc.findContours(mFGMask, contours, new Mat(), Imgproc.RETR_EXTERNAL , Imgproc.CHAIN_APPROX_NONE); //draws all the contours in red with thickness of 2 Imgproc.drawContours(mRgb, contours, -1, new Scalar(255, 0, 0), 2); return mRgb; }
Example 8
Source File: CVProcessor.java From CVScanner with GNU General Public License v3.0 | 5 votes |
/**
 * Finds candidate contours for a passport MRZ (machine-readable zone).
 * Pipeline: downscale, grayscale + median blur, black-hat to pop dark text on a
 * light background, horizontal Sobel gradient normalized to 0-255, Otsu threshold,
 * two morphological closes to fuse characters into line-shaped blobs, border
 * clearing, then external contours sorted by area (largest first).
 *
 * <p>Note: releases {@code src} — the caller must not use it afterwards.
 *
 * @param src input image; consumed (released) by this method
 * @return external contours sorted by descending area
 */
public static List<MatOfPoint> findContoursForMRZ(Mat src){
    Mat img = src.clone();
    src.release();
    // Downscale to a working size to speed up the morphology passes.
    double ratio = getScaleRatio(img.size());
    int width = (int) (img.size().width / ratio);
    int height = (int) (img.size().height / ratio);
    Size newSize = new Size(width, height);
    Mat resizedImg = new Mat(newSize, CvType.CV_8UC4);
    Imgproc.resize(img, resizedImg, newSize);
    Mat gray = new Mat();
    Imgproc.cvtColor(resizedImg, gray, Imgproc.COLOR_BGR2GRAY);
    Imgproc.medianBlur(gray, gray, 3);
    //Imgproc.blur(gray, gray, new Size(3, 3));
    // Wide, short kernel (13x5) matches the aspect of MRZ text lines.
    Mat morph = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(13, 5));
    Mat dilatedImg = new Mat();
    // Black-hat highlights dark regions (text) against a light background.
    Imgproc.morphologyEx(gray, dilatedImg, Imgproc.MORPH_BLACKHAT, morph);
    gray.release();
    Mat gradX = new Mat();
    // Horizontal gradient only (dx=1, dy=0): MRZ characters are side by side.
    Imgproc.Sobel(dilatedImg, gradX, CvType.CV_32F, 1, 0);
    dilatedImg.release();
    Core.convertScaleAbs(gradX, gradX, 1, 0);
    // Min-max normalize the gradient into the full 0-255 range.
    Core.MinMaxLocResult minMax = Core.minMaxLoc(gradX);
    Core.convertScaleAbs(gradX, gradX, (255/(minMax.maxVal - minMax.minVal)),
            - ((minMax.minVal * 255) / (minMax.maxVal - minMax.minVal)));
    // Close with the text-shaped kernel to join adjacent character gradients.
    Imgproc.morphologyEx(gradX, gradX, Imgproc.MORPH_CLOSE, morph);
    Mat thresh = new Mat();
    Imgproc.threshold(gradX, thresh, 0, 255, Imgproc.THRESH_OTSU);
    gradX.release();
    morph.release();
    // Second, larger close (21x21) merges character blobs into full-line blobs,
    // then 4 erosions split blobs that are only weakly connected.
    morph = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(21, 21));
    Imgproc.morphologyEx(thresh, thresh, Imgproc.MORPH_CLOSE, morph);
    Imgproc.erode(thresh, thresh, new Mat(), new Point(-1, -1), 4);
    morph.release();
    // Zero out 5% margins at the left and right borders to drop edge artifacts.
    int col = (int) resizedImg.size().width;
    int p = (int) (resizedImg.size().width * 0.05);
    int row = (int) resizedImg.size().height;
    for(int i = 0; i < row; i++) {
        for(int j = 0; j < p; j++){
            thresh.put(i, j, 0);
            // NOTE(review): when j == 0 this targets column `col`, which is one past
            // the last valid index (col-1) — presumably intended as col-1-j; confirm
            // how Mat.put behaves out of range in this OpenCV version.
            thresh.put(i, col-j, 0);
        }
    }
    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(thresh, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    hierarchy.release();
    Log.d(TAG, "contours found: " + contours.size());
    // Sort by contour area, largest first (o2 compared to o1).
    Collections.sort(contours, new Comparator<MatOfPoint>() {
        @Override
        public int compare(MatOfPoint o1, MatOfPoint o2) {
            return Double.valueOf(Imgproc.contourArea(o2)).compareTo(Imgproc.contourArea(o1));
        }
    });
    return contours;
}
Example 9
Source File: Filter.java From FTCVision with MIT License | 5 votes |
/**
 * Erodes the image in place using morphological transformations.
 *
 * @param img    image matrix, modified in place
 * @param amount erosion radius; must be &gt;= 0 (0 yields a 1x1 kernel,
 *               leaving the image effectively unchanged)
 */
public static void erode(Mat img, int amount) {
    // Square (2*amount+1) kernel anchored at its center so erosion is symmetric.
    // FIX: use MORPH_RECT — the modern shape constant — instead of the legacy
    // CV_SHAPE_RECT alias (removed in OpenCV 3.x). Both equal 0, so the kernel
    // and the resulting erosion are identical.
    Mat kernel = Imgproc.getStructuringElement(
            Imgproc.MORPH_RECT,
            new Size(2 * amount + 1, 2 * amount + 1),
            new Point(amount, amount));
    Imgproc.erode(img, img, kernel);
}
Example 10
Source File: DarkChannelPriorDehaze.java From OptimizedImageEnhance with MIT License | 4 votes |
/**
 * Dehazes an image with the dark-channel-prior method: derive the dark channel,
 * build a coarse transmission map from it, refine the map with a guided filter,
 * then recover each color channel using the estimated atmospheric light.
 *
 * @param image          input image; converted to CV_32F in place
 * @param krnlRatio      fraction of the image dimension used for the minimum-filter kernel
 * @param minAtmosLight  upper bound for the atmospheric-light estimate
 * @param eps            regularization epsilon for the guided filter
 * @return the dehazed 8-bit image
 */
public static Mat enhance(Mat image, double krnlRatio, double minAtmosLight, double eps) {
    image.convertTo(image, CvType.CV_32F);
    // extract each color channel
    List<Mat> rgb = new ArrayList<>();
    Core.split(image, rgb);
    // NOTE(review): channel naming assumes the input is in RGB order; if the Mat
    // was loaded BGR (OpenCV's imread default) channels 0 and 2 are swapped — confirm.
    Mat rChannel = rgb.get(0);
    Mat gChannel = rgb.get(1);
    Mat bChannel = rgb.get(2);
    int rows = rChannel.rows();
    int cols = rChannel.cols();
    // derive the dark channel from original image: per-pixel minimum across channels
    Mat dc = rChannel.clone();
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            double min = Math.min(rChannel.get(i, j)[0], Math.min(gChannel.get(i, j)[0], bChannel.get(i, j)[0]));
            dc.put(i, j, min);
        }
    }
    // minimum filter: erosion over a square kernel implements the local minimum;
    // kernel size scales with the image but is at least 3.
    int krnlSz = Double.valueOf(Math.max(Math.max(rows * krnlRatio, cols * krnlRatio), 3.0)).intValue();
    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(krnlSz, krnlSz), new Point(-1, -1));
    Imgproc.erode(dc, dc, kernel);
    // get coarse transmission map: t = 1 - dc/255, computed as -(dc - 255)/255
    Mat t = dc.clone();
    Core.subtract(t, new Scalar(255.0), t);
    Core.multiply(t, new Scalar(-1.0), t);
    Core.divide(t, new Scalar(255.0), t);
    // obtain gray scale image, normalized to [0, 1], as the guided-filter guide
    Mat gray = new Mat();
    Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
    Core.divide(gray, new Scalar(255.0), gray);
    // refine transmission map with a guided filter; radius is 4x the kernel size
    int r = krnlSz * 4;
    t = Filters.GuidedImageFilter(gray, t, r, eps);
    // get minimum atmospheric light: capped by the dark channel's maximum
    minAtmosLight = Math.min(minAtmosLight, Core.minMaxLoc(dc).maxVal);
    // dehaze each color channel
    rChannel = dehaze(rChannel, t, minAtmosLight);
    gChannel = dehaze(gChannel, t, minAtmosLight);
    bChannel = dehaze(bChannel, t, minAtmosLight);
    // merge three color channels to a image
    Mat outval = new Mat();
    Core.merge(new ArrayList<>(Arrays.asList(rChannel, gChannel, bChannel)), outval);
    outval.convertTo(outval, CvType.CV_8UC1);
    return outval;
}
Example 11
Source File: AAVActivity.java From AAV with GNU General Public License v2.0 | 4 votes |
/**
 * Per-frame color-blob tracking: thresholds the frame in HSV, finds the largest
 * contour above a minimum area, and (optionally) draws its enclosing circle.
 *
 * @param inputFrame the current camera frame
 * @return the RGBA frame, possibly annotated with the detected blob's circle
 */
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    synchronized (inputFrame) {
        _rgbaImage = inputFrame.rgba();
        // The Nexus 5X sensor is mounted upside down; flip both axes.
        if (android.os.Build.MODEL.equalsIgnoreCase("Nexus 5X")) {
            Core.flip(_rgbaImage, _rgbaImage, -1);
        }
        double current_contour;
        // In contrast to the C++ interface, Android API captures images in the RGBA format.
        // Also, in HSV space, only the hue determines which color it is. Saturation determines
        // how 'white' the color is, and Value determines how 'dark' the color is.
        Imgproc.cvtColor(_rgbaImage, _hsvMat, Imgproc.COLOR_RGB2HSV_FULL);
        Core.inRange(_hsvMat, _lowerThreshold, _upperThreshold, _processedMat);
        // Imgproc.dilate(_processedMat, _dilatedMat, new Mat());
        // NOTE(review): the live call is erode() writing into _dilatedMat while the
        // dilate() above is commented out — the field name no longer matches the
        // operation; confirm erode is the intended denoising step.
        Imgproc.erode(_processedMat, _dilatedMat, new Mat());
        Imgproc.findContours(_dilatedMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        MatOfPoint2f points = new MatOfPoint2f();
        // Start the area race at 7 so tiny specks never win.
        _contourArea = 7;
        for (int i = 0, n = contours.size(); i < n; i++) {
            current_contour = Imgproc.contourArea(contours.get(i));
            if (current_contour > _contourArea) {
                _contourArea = current_contour;
                contours.get(i).convertTo(points, CvType.CV_32FC2); // contours.get(x) is a single MatOfPoint, but to use minEnclosingCircle we need to pass a MatOfPoint2f so we need to do a conversion
            }
        }
        if (!points.empty() && _contourArea > MIN_CONTOUR_AREA) {
            Imgproc.minEnclosingCircle(points, _centerPoint, null);
            // Core.circle(_rgbaImage, _centerPoint, 3, new Scalar(255, 0, 0), Core.FILLED);
            // Radius derived from area assuming a roughly circular blob: r = sqrt(A/pi).
            if (_showContourEnable)
                Core.circle(_rgbaImage, _centerPoint, (int) Math.round(Math.sqrt(_contourArea / Math.PI)), new Scalar(255, 0, 0), 3, 8, 0);// Core.FILLED);
        }
        contours.clear();
    }
    return _rgbaImage;
}