Java Code Examples for org.opencv.core.Mat#copyTo()
The following examples show how to use
org.opencv.core.Mat#copyTo().
You can go to the original project or source file by following the link above each example.
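Before the project examples, here is a minimal, self-contained sketch of the two overloads the snippets below rely on: copyTo(dst) performs a full deep copy, while copyTo(dst, mask) copies only the pixels where the mask is non-zero. The class name CopyToDemo is a placeholder, and the System.loadLibrary call assumes a desktop OpenCV build; Android apps typically load the native library through OpenCVLoader instead.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class CopyToDemo {
    public static void main(String[] args) {
        // Assumes a desktop OpenCV installation; see the note above for Android.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat src = new Mat(4, 4, CvType.CV_8UC1, new Scalar(7));

        // Form 1: plain deep copy. dst is (re)allocated to match src,
        // so later edits to dst do not affect src.
        Mat dst = new Mat();
        src.copyTo(dst);

        // Form 2: masked copy. Only pixels where the mask is non-zero are
        // copied; the rest of the destination keeps its previous values.
        Mat mask = Mat.zeros(4, 4, CvType.CV_8UC1);
        mask.put(0, 0, 255); // enable a single pixel
        Mat partial = new Mat(4, 4, CvType.CV_8UC1, new Scalar(0));
        src.copyTo(partial, mask);

        System.out.println("dst:\n" + dst.dump());
        System.out.println("partial:\n" + partial.dump());
    }
}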
Example 1
Source File: DogeCVDetector.java From DogeCV with GNU General Public License v3.0 | 6 votes |
@Override
public final Mat processFrame(Mat input) {
    size = input.size();
    Log.d("DogeCVDetector", "Input mat size:" + input.size());
    input.copyTo(workingMat);

    if (workingMat.empty()) {
        return input;
    }

    workingMat = process(workingMat);

    // Print info
    Imgproc.putText(workingMat, "DogeCV 2020.1 " + detectorName + ": " + stageToRenderToViewport.toString(),
            new Point(5, 30), 0, 0.5, new Scalar(0, 255, 255), 2);

    return workingMat;
}
Example 2
Source File: StepByStepTestActivity.java From CVScanner with GNU General Public License v3.0 | 6 votes |
Mat buildSkeleton(Mat img) {
    Mat morph = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat skel = new Mat(img.size(), CvType.CV_8UC1, Scalar.all(0));
    Mat eroded = new Mat();
    Mat temp = new Mat();

    boolean done = false;

    do {
        Imgproc.morphologyEx(img, eroded, Imgproc.MORPH_ERODE, morph);
        Imgproc.morphologyEx(eroded, temp, Imgproc.MORPH_DILATE, morph);
        Core.subtract(img, temp, temp);
        Core.bitwise_or(skel, temp, skel);
        eroded.copyTo(img);

        done = Core.countNonZero(img) == 0;
    } while (!done);

    return skel;
}
Example 3
Source File: ImgprocessUtils.java From classchecks with Apache License 2.0 | 6 votes |
/**
 * @Title: resize
 * @Description: scale down an image
 * @param srcImg source image
 * @param scale shrink ratio
 * @param scaledWidth target width
 * @return Mat
 */
public static Mat resize(Mat srcImg, float scale, int scaledWidth) {
    Mat inputImg = new Mat();
    // Compute the shrink ratio
    //float scale = srcImg.cols() / (float) scaledWidth;
    if (srcImg.cols() > scaledWidth) {
        // Shrink the image while keeping the same aspect ratio
        // Math.round == cvRound (javacv has no cvRound)
        int scaledHeight = Math.round(srcImg.rows() / scale);
        Imgproc.resize(srcImg, inputImg, new Size(scaledWidth, scaledHeight));
    } else {
        // The image is already small enough, so use it directly
        srcImg.copyTo(inputImg);
    }
    return inputImg;
}
Example 4
Source File: DetectionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 | 6 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);

    List<Mat> images = ppF.getCroppedImage(img);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }

    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], "", front_camera);
        }
        return imgRgba;
    }
}
Example 5
Source File: RecognitionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 | 6 votes |
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);

    List<Mat> images = ppF.getProcessedImage(img, PreProcessorFactory.PreprocessingMode.RECOGNITION);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }

    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], rec.recognize(images.get(i), ""), front_camera);
        }
        return imgRgba;
    }
}
Example 6
Source File: DctConverter.java From BlindWatermark with Apache License 2.0 | 5 votes |
@Override
public void addImageWatermark(Mat com, Mat watermark) {
    Mat mask = new Mat();
    inRange(watermark, new Scalar(0, 0, 0, 0), new Scalar(0, 0, 0, 0), mask);

    Mat i2 = new Mat(watermark.size(), watermark.type(), new Scalar(2, 2, 2, 0));
    i2.copyTo(watermark, mask);
    watermark.convertTo(watermark, CV_32F);

    int row = (com.rows() - watermark.rows()) >> 1;
    int col = (com.cols() - watermark.cols()) >> 1;
    copyMakeBorder(watermark, watermark, row, row, col, col, BORDER_CONSTANT, Scalar.all(0));
    Utils.fixSize(watermark, com);
    addWeighted(watermark, 0.03, com, 1, 0.0, com);
}
Example 7
Source File: FaceRecognitionActivity.java From AndroidFaceRecognizer with MIT License | 5 votes |
private void detectFaceOnFrame(final Mat frame) {
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            detectionInProgress = true;
            if (mAbsoluteFaceSize == 0) {
                int height = frame.rows();
                if (Math.round(height * mRelativeFaceSize) > 0) {
                    mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
                }
                FaceDetectionUtils.mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
            }
            Mat faceMat = FaceDetection.detectFaces(null, frame, capturingImage);
            if (faceMat != null) {
                long now = System.currentTimeMillis();
                if (now - lastDetectionTime > 400) {
                    Mat m = new Mat(faceMat.rows(), faceMat.cols(), faceMat.type());
                    faceMat.copyTo(m);
                    onFaceCaptured(m);
                }
                lastDetectionTime = now;
            }
            detectionInProgress = false;
        }
    });
    if (!detectionInProgress) {
        t.start();
    }
}
Example 8
Source File: FaceDetectionActivity.java From AndroidFaceRecognizer with MIT License | 5 votes |
private void detectFaceOnFrame(final Mat frame) {
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            detectionInProgress = true;
            if (mAbsoluteFaceSize == 0) {
                int height = frame.rows();
                if (Math.round(height * mRelativeFaceSize) > 0) {
                    mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
                }
                FaceDetectionUtils.mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
            }
            Mat faceMat = FaceDetection.detectFaces(null, frame, capturingImage);
            if (faceMat != null) {
                long now = System.currentTimeMillis();
                if (now - lastDetectionTime > 400) {
                    Mat m = new Mat(faceMat.rows(), faceMat.cols(), faceMat.type());
                    faceMat.copyTo(m);
                    onFaceCaptured(m);
                }
                lastDetectionTime = now;
            }
            detectionInProgress = false;
        }
    });
    if (!detectionInProgress) {
        t.start();
    }
}
Example 9
Source File: PreProcessorFactory.java From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 | 5 votes |
private List<Mat> getCopiedImageList(Mat img) {
    List<Mat> images = new ArrayList<Mat>();
    Mat imgCopy = new Mat();
    img.copyTo(imgCopy);
    images.add(imgCopy);
    return images;
}
Example 10
Source File: HoughCircles.java From opencv-fun with GNU Affero General Public License v3.0 | 5 votes |
public static void main(String[] args) {
    CVLoader.load();
    Mat orig = Highgui.imread("data/topdown-6.jpg");
    Mat gray = new Mat();
    orig.copyTo(gray);

    // blur
    // Imgproc.medianBlur(gray, gray, 5);
    // Imgproc.GaussianBlur(gray, gray, new Size(3, 3), 100);

    // convert to grayscale
    Imgproc.cvtColor(gray, gray, Imgproc.COLOR_BGR2GRAY);

    // do hough circles
    Mat circles = new Mat();
    int minRadius = 10;
    int maxRadius = 18;
    Imgproc.HoughCircles(gray, circles, Imgproc.CV_HOUGH_GRADIENT, 1, minRadius, 120, 10, minRadius, maxRadius);
    System.out.println(circles);

    ImgWindow.newWindow(gray);
    ImgWindow wnd = ImgWindow.newWindow(orig);

    while (!wnd.closed) {
        wnd.setImage(orig);
        Graphics2D g = wnd.begin();
        g.setColor(Color.MAGENTA);
        g.setStroke(new BasicStroke(3));
        for (int i = 0; i < circles.cols(); i++) {
            double[] circle = circles.get(0, i);
            g.drawOval((int) circle[0] - (int) circle[2], (int) circle[1] - (int) circle[2],
                    (int) circle[2] * 2, (int) circle[2] * 2);
        }
        wnd.end();
    }
}
Example 11
Source File: FtcTestOpenCv.java From FtcSamples with MIT License | 5 votes |
/**
 * This method rotates the image to the specified angle.
 *
 * @param src specifies the image to be rotated.
 * @param dst specifies the destination to put the rotated image.
 * @param angle specifies the rotation angle.
 */
private void rotateImage(Mat src, Mat dst, double angle) {
    angle %= 360.0;
    if (angle == 0.0) {
        src.copyTo(dst);
    } else if (angle == 90.0 || angle == -270.0) {
        Core.transpose(src, dst);
        Core.flip(dst, dst, 1);
    } else if (angle == 180.0 || angle == -180.0) {
        Core.flip(src, dst, -1);
    } else if (angle == 270.0 || angle == -90.0) {
        Core.transpose(src, dst);
        Core.flip(dst, dst, 0);
    } else {
        Mat rotMat = Imgproc.getRotationMatrix2D(
                new Point(src.cols() / 2.0, src.rows() / 2.0), angle, 1.0);
        Imgproc.warpAffine(src, dst, rotMat, src.size());
    }
}
Example 12
Source File: ImgprocessUtils.java From classchecks with Apache License 2.0 | 5 votes |
/**
 * @Title: rectangle
 * @Description: draw rectangles on the source image for the detected face regions
 * @param mImgSRC source image
 * @param rects detected face rectangles
 * @return Mat
 */
public static Mat rectangle(Mat mImgSRC, Rect... rects) {
    Mat tmp = new Mat();
    mImgSRC.copyTo(tmp);
    for (Rect r : rects) {
        Imgproc.rectangle(tmp, new Point(r.x, r.y), new Point(r.x + r.width, r.y + r.height),
                new Scalar(0, 0, 255));
    }
    return tmp;
}
Example 13
Source File: ImgprocessUtils.java From classchecks with Apache License 2.0 | 5 votes |
/**
 * Image normalization.
 * @Title: norm_0_255
 * @Description: TODO (describe what this method does in one sentence)
 * @param src source matrix
 * @return Mat
 */
public static Mat norm_0_255(Mat src) {
    // Create and return a normalized copy of the image matrix
    Mat dst = new Mat();
    switch (src.channels()) {
        case 1:
            Core.normalize(src, dst, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1);
            break;
        case 3:
            Core.normalize(src, dst, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC3);
            break;
        default:
            src.copyTo(dst);
            break;
    }
    return dst;
}
Example 14
Source File: AuthenticationActivity.java From ml-authentication with Apache License 2.0 | 4 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();

    // Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
    // M.Schälchli 20170129
    // if (isDeviceRooted){
    //     DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
    // }

    long currentTime = new Date().getTime();

    if ((!tensorFlowLoadingThread.isAlive()) && ((startTimeAuthenticationAnimation + AUTHENTICATION_ANIMATION_TIME) < currentTime)) {
        prepareForAuthentication();

        if (!recognitionThread.isAlive() && recognitionThreadStarted) {
            List<Student> students = recognitionThread.getRecognizedStudent();
            Student student = new Student();
            if (students.size() == 1) {
                student = students.get(0);
            }
            numberOfTries++;
            Log.i(getClass().getName(), "Number of authentication/recognition tries: " + numberOfTries);
            if ((student != null) && (students.size() == 1)) {
                AuthenticationHelper.updateCurrentStudent(student, getApplicationContext(), false);
                finish();
            } else if (numberOfTries >= NUMBER_OF_MAXIMUM_TRIES) {
                startStudentImageCollectionActivity(true);
            }
            recognitionThreadStarted = false;
        }

        Mat imgCopy = new Mat();
        // Store original image for face recognition
        imgRgba.copyTo(imgCopy);

        // Mirror front camera image
        Core.flip(imgRgba, imgRgba, 1);

        Rect face = new Rect();
        boolean isFaceInsideFrame = false;
        boolean faceDetected = false;

        List<Mat> images = ppF.getCroppedImage(imgCopy);
        if (images != null && images.size() == 1) {
            Mat img = images.get(0);
            if (img != null) {
                Rect[] faces = ppF.getFacesForRecognition();
                if (faces != null && faces.length == 1) {
                    faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                    face = faces[0];
                    faceDetected = true;
                    // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                    startTimeFallback = currentTime;
                    isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);
                    if (isFaceInsideFrame) {
                        if (!recognitionThread.isAlive() && !recognitionThreadStarted) {
                            if (!activityStopped) {
                                mediaPlayerAnimalSound.start();
                                recognitionThread = new RecognitionThread(tensorFlow, studentImageCollectionEventDao);
                                recognitionThread.setImg(img);
                                recognitionThread.start();
                                recognitionThreadStarted = true;
                            }
                        }
                    }
                }
            }
        }

        if (faceDetected && !isFaceInsideFrame && !activityStopped) {
            DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
            AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
        }

        if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)) {
            // Prevent a second execution of the fallback activity because of threading
            startTimeFallback = currentTime;
            DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
            finish();
        }

        EnvironmentSettings.freeMemory();
    }
    return imgRgba;
}
Example 15
Source File: StudentImageCollectionActivity.java From ml-authentication with Apache License 2.0 | 4 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    final Mat imgRgba = inputFrame.rgba();

    // Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
    // M.Schälchli 20170129
    // if (isDeviceRooted){
    //     DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
    // }

    long currentTime = new Date().getTime();

    if (authenticationAnimationAlreadyPlayed || ((startTimeAuthenticationAnimation + AuthenticationActivity.AUTHENTICATION_ANIMATION_TIME) < currentTime)) {
        prepareForAuthentication();

        Mat imgCopy = new Mat();
        // Store original image for face recognition
        imgRgba.copyTo(imgCopy);

        // Mirror front camera image
        Core.flip(imgRgba, imgRgba, 1);

        Rect face = new Rect();
        boolean isFaceInsideFrame = false;
        boolean faceDetected = false;

        if ((lastTime + TIMER_DIFF) < currentTime) {
            lastTime = currentTime;
            List<Mat> images = ppF.getCroppedImage(imgCopy);
            if ((images != null) && (images.size() == 1)) {
                Mat img = images.get(0);
                if (img != null) {
                    Rect[] faces = ppF.getFacesForRecognition();
                    if ((faces != null) && (faces.length == 1)) {
                        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                        face = faces[0];
                        faceDetected = true;
                        // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                        startTimeFallback = currentTime;
                        isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);
                        if (isFaceInsideFrame) {
                            if (!activityStopped) {
                                mediaPlayerAnimalSound.start();
                                studentImages.add(img);
                                // Stop after NUMBER_OF_IMAGES (settings option)
                                if (imagesProcessed == NUMBER_OF_IMAGES) {
                                    storeStudentImages();
                                    finish();
                                }
                                imagesProcessed++;
                            }
                        }
                    }
                }
            }
        }

        if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)) {
            // Prevent a second execution of the fallback activity because of threading
            startTimeFallback = currentTime;
            DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
            finish();
        }

        if (faceDetected && !isFaceInsideFrame && !activityStopped) {
            DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
            AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
        }

        EnvironmentSettings.freeMemory();
    }
    return imgRgba;
}
Example 16
Source File: LeviColorFilter.java From DogeCV with GNU General Public License v3.0 | 4 votes |
/**
 * Process an image and return a mask
 * @param input - Input image to process
 * @param mask - Output mask
 */
@Override
public void process(Mat input, Mat mask) {
    channels = new ArrayList<>();

    switch (color) {
        case RED:
            if (threshold == -1) {
                threshold = 164;
            }
            Imgproc.cvtColor(input, input, Imgproc.COLOR_RGB2Lab);
            Imgproc.GaussianBlur(input, input, new Size(3, 3), 0);
            Core.split(input, channels);
            Imgproc.threshold(channels.get(1), mask, threshold, 255, Imgproc.THRESH_BINARY);
            break;
        case BLUE:
            if (threshold == -1) {
                threshold = 145;
            }
            Imgproc.cvtColor(input, input, Imgproc.COLOR_RGB2YUV);
            Imgproc.GaussianBlur(input, input, new Size(3, 3), 0);
            Core.split(input, channels);
            Imgproc.threshold(channels.get(1), mask, threshold, 255, Imgproc.THRESH_BINARY);
            break;
        case WHITE:
            if (threshold == -1) {
                threshold = 150;
            }
            Imgproc.cvtColor(input, input, Imgproc.COLOR_RGB2Lab);
            Imgproc.GaussianBlur(input, input, new Size(3, 3), 0);
            Core.split(input, channels);
            Core.inRange(channels.get(0), new Scalar(threshold, 150, 40), new Scalar(255, 150, 150), mask);
            break;
        case YELLOW:
            if (threshold == -1) {
                threshold = 70;
            }
            Mat lab = new Mat(input.size(), 0);
            Imgproc.cvtColor(input, lab, Imgproc.COLOR_RGB2Lab);
            Mat temp = new Mat();
            Core.inRange(input, new Scalar(0, 0, 0), new Scalar(255, 255, 164), temp);
            Mat mask2 = new Mat(input.size(), 0);
            temp.copyTo(mask2);
            input.copyTo(input, mask2);
            mask2.release();
            temp.release();
            lab.release();
            Imgproc.cvtColor(input, input, Imgproc.COLOR_RGB2YUV);
            Imgproc.GaussianBlur(input, input, new Size(3, 3), 0);
            Core.split(input, channels);
            if (channels.size() > 0) {
                Imgproc.threshold(channels.get(1), mask, threshold, 255, Imgproc.THRESH_BINARY_INV);
            }
            break;
    }

    for (int i = 0; i < channels.size(); i++) {
        channels.get(i).release();
    }
    input.release();
}
Example 17
Source File: StoneDetector.java From DogeCV with GNU General Public License v3.0 | 4 votes |
@Override
public Mat process(Mat input) {
    screenPositions.clear();
    foundRects.clear();

    input.copyTo(rawImage);
    input.copyTo(workingMat);
    input.copyTo(displayMat);
    input.copyTo(yellowMask);

    // Imgproc.GaussianBlur(workingMat, workingMat, new Size(5, 5), 0);
    filter.process(workingMat.clone(), yellowMask);

    List<MatOfPoint> contoursYellow = new ArrayList<>();
    Imgproc.findContours(yellowMask, contoursYellow, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    Imgproc.drawContours(displayMat, contoursYellow, -1, new Scalar(230, 70, 70), 2);

    // Current result
    ArrayList<Rect> bestRects = new ArrayList<>();
    double bestDifference = Double.MAX_VALUE; // MAX_VALUE since less difference = better

    Collections.sort(contoursYellow, new Comparator<MatOfPoint>() {
        @Override
        public int compare(MatOfPoint matOfPoint, MatOfPoint t1) {
            return calculateScore(matOfPoint) > calculateScore(t1) ? 1 : 0;
        }
    });

    List<MatOfPoint> subList = contoursYellow;
    if (contoursYellow.size() > stonesToFind) {
        subList = contoursYellow.subList(0, stonesToFind);
    }

    for (MatOfPoint contour : subList) {
        Rect rect = Imgproc.boundingRect(contour);

        // Show chosen result
        Imgproc.rectangle(displayMat, rect.tl(), rect.br(), new Scalar(255, 0, 0), 4);
        Imgproc.putText(displayMat, "Chosen", rect.tl(), 0, 1, new Scalar(255, 255, 255));

        screenPositions.add(new Point(rect.x, rect.y));
        foundRects.add(rect);
    }

    if (foundRects.size() > 0) {
        found = true;
    } else {
        found = false;
    }

    switch (stageToRenderToViewport) {
        case THRESHOLD: {
            Imgproc.cvtColor(yellowMask, yellowMask, Imgproc.COLOR_GRAY2BGR);
            return yellowMask;
        }
        case RAW_IMAGE: {
            return rawImage;
        }
        default: {
            return displayMat;
        }
    }
}
Example 18
Source File: ProcessHelper.java From OpenCV-android with Apache License 2.0 | 4 votes |
/**
 * Line detection
 *
 * @param origin   original bitmap
 * @param callback callback
 */
public void hough(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap bitmap = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
        Mat edges = new Mat();
        Mat lines = new Mat();
        // Copy the input frame so the original stays untouched
        Mat origination = new Mat(rgbMat.size(), CvType.CV_8UC4);
        rgbMat.copyTo(origination);
        // Get an edge map via Canny
        Imgproc.cvtColor(origination, grayMat, Imgproc.COLOR_RGB2GRAY);
        Imgproc.Canny(grayMat, edges, 50, 200);
        // Detect line segments
        Imgproc.HoughLinesP(edges, lines, 1, Math.PI / 180, 10, 0, 10);
        // Black canvas for the detected lines
        Mat houghLines = Mat.zeros(edges.size(), CvType.CV_8UC1);
        // Draw the detected lines
        for (int i = 0; i < lines.rows(); i++) {
            double[] points = lines.get(i, 0);
            if (null != points) {
                Point pt1 = new Point(points[0], points[1]);
                Point pt2 = new Point(points[2], points[3]);
                Imgproc.line(houghLines, pt1, pt2, new Scalar(55, 100, 195), 3);
            }
        }
        Utils.matToBitmap(houghLines, bitmap);
        callback.onSuccess(bitmap);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}