Java Code Examples for org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame#gray()
The following examples show how to use
org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame#gray() .
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: MainActivity.java From Android_OCV_Movement_Detection with MIT License | 6 votes |
public Mat onCameraFrame(CvCameraViewFrame inputFrame) { contours.clear(); //gray frame because it requires less resource to process mGray = inputFrame.gray(); //this function converts the gray frame into the correct RGB format for the BackgroundSubtractorMOG apply function Imgproc.cvtColor(mGray, mRgb, Imgproc.COLOR_GRAY2RGB); //apply detects objects moving and produces a foreground mask //the lRate updates dynamically dependent upon seekbar changes sub.apply(mRgb, mFGMask, lRate); //erode and dilate are used to remove noise from the foreground mask Imgproc.erode(mFGMask, mFGMask, new Mat()); Imgproc.dilate(mFGMask, mFGMask, new Mat()); //drawing contours around the objects by first called findContours and then calling drawContours //RETR_EXTERNAL retrieves only external contours //CHAIN_APPROX_NONE detects all pixels for each contour Imgproc.findContours(mFGMask, contours, new Mat(), Imgproc.RETR_EXTERNAL , Imgproc.CHAIN_APPROX_NONE); //draws all the contours in red with thickness of 2 Imgproc.drawContours(mRgb, contours, -1, new Scalar(255, 0, 0), 2); return mRgb; }
Example 2
Source File: MainActivity.java From open-quartz with Apache License 2.0 | 6 votes |
public Mat onCameraFrame(CvCameraViewFrame inputFrame) { mRgba = inputFrame.rgba(); mGray = inputFrame.gray(); switch (MainActivity.viewMode) { case MainActivity.VIEW_MODE_RGBA: return mRgba; case MainActivity.VIEW_MODE_HIST: return mRgba; case MainActivity.VIEW_MODE_CANNY: Imgproc.Canny(mGray, mIntermediateMat, 80, 100); Imgproc.cvtColor(mIntermediateMat, mGray, Imgproc.COLOR_GRAY2BGRA, 4); return mGray; case MainActivity.VIEW_MODE_SOBEL: Imgproc.Sobel(mGray, mGray, CvType.CV_8U, 1, 1); // Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0); Imgproc.cvtColor(mGray, mGray, Imgproc.COLOR_GRAY2BGRA, 4); return mGray; case MainActivity.VIEW_MODE_PIXELIZE: Imgproc.resize(mGray, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST); Imgproc.resize(mIntermediateMat, mRgba, mRgba.size(), 0.0, 0.0, Imgproc.INTER_NEAREST); return mRgba; case MainActivity.VIEW_MODE_GRAY: return mGray; case MainActivity.VIEW_MODE_FEATURES: FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr()); return mRgba; default: return mRgba; } }
Example 3
Source File: Tutorial2Activity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
public Mat onCameraFrame(CvCameraViewFrame inputFrame) { final int viewMode = mViewMode; switch (viewMode) { case VIEW_MODE_GRAY: // input frame has gray scale format Imgproc.cvtColor(inputFrame.gray(), mRgba, Imgproc.COLOR_GRAY2RGBA, 4); break; case VIEW_MODE_RGBA: // input frame has RBGA format mRgba = inputFrame.rgba(); break; case VIEW_MODE_CANNY: // input frame has gray scale format mRgba = inputFrame.rgba(); Imgproc.Canny(inputFrame.gray(), mIntermediateMat, 80, 100); Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4); break; case VIEW_MODE_FEATURES: // input frame has RGBA format mRgba = inputFrame.rgba(); mGray = inputFrame.gray(); FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr()); break; } return mRgba; }
Example 4
Source File: FaceDetectionActivity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
public Mat onCameraFrame(CvCameraViewFrame inputFrame) { mRgba = inputFrame.rgba(); mGray = inputFrame.gray(); if (mAbsoluteFaceSize == 0) { int height = mGray.rows(); if (Math.round(height * mRelativeFaceSize) > 0) { mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize); } mNativeDetector.setMinFaceSize(mAbsoluteFaceSize); } MatOfRect faces = new MatOfRect(); if (mDetectorType == JAVA_DETECTOR) { if (mJavaDetector != null) mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size()); } else if (mDetectorType == NATIVE_DETECTOR) { if (mNativeDetector != null) mNativeDetector.detect(mGray, faces); } else { Log.e(TAG, "Detection method is not selected!"); } Rect[] facesArray = faces.toArray(); for (int i = 0; i < facesArray.length; i++) Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3); return mRgba; }
Example 5
Source File: FaceDetectionActivity.java From AndroidFaceRecognizer with MIT License | 5 votes |
/**
 * Stores the latest RGBA and gray frames; when no detection pass is in
 * flight, hands a copy of the gray frame to the face detector.
 *
 * @param inputFrame the current frame from the OpenCV camera bridge
 * @return the untouched RGBA frame for display
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (!detectionInProgress) {
        // Copy the gray frame before handing it off — the flag suggests
        // detection runs asynchronously, so presumably this protects it
        // from the camera reusing mGray (TODO confirm).
        Mat snapshot = new Mat(mGray.rows(), mGray.cols(), mGray.type());
        mGray.copyTo(snapshot);
        detectFaceOnFrame(snapshot);
    }

    return mRgba;
}
Example 6
Source File: FdActivity.java From open-quartz with Apache License 2.0 | 5 votes |
public Mat onCameraFrame(CvCameraViewFrame inputFrame) { mRgba = inputFrame.rgba(); mGray = inputFrame.gray(); if (mAbsoluteFaceSize == 0) { int height = mGray.rows(); if (Math.round(height * mRelativeFaceSize) > 0) { mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize); } } MatOfRect faces = new MatOfRect(); if (mJavaDetector != null) { mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size()); } // Draw rectangles Rect[] facesArray = faces.toArray(); for (int i = 0; i < facesArray.length; i++) { Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3); } return mRgba; }
Example 7
Source File: MainActivity.java From MOAAP with MIT License | 4 votes |
/**
 * Tracks feature points across consecutive gray frames with pyramidal
 * Lucas-Kanade optical flow and draws the result onto the gray frame.
 *
 * Two modes: VIEW_MODE_OPTICAL_FLOW seeds a fixed grid of points and draws
 * motion lines; VIEW_MODE_KLT_TRACKER seeds Shi-Tomasi corners and draws
 * the tracked points as circles. Any other mode switches to KLT for the
 * next frame.
 *
 * NOTE(review): tracking state lives in fields (features, prevFeatures,
 * nextFeatures, mPrevGray) — the update order at the end of each case is
 * significant. Also note the first frame of each mode only seeds state
 * (the early break) and returns an unannotated frame, and the default
 * case returns mGray without assigning it this frame.
 *
 * @param inputFrame the current frame from the OpenCV camera bridge
 * @return the gray frame, annotated when tracking ran this frame
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    final int viewMode = mViewMode;
    switch (viewMode) {
    case VIEW_MODE_OPTICAL_FLOW:
        mGray = inputFrame.gray();
        // No features yet: seed a regular grid and defer flow to next frame.
        if(features.toArray().length==0){
            int rowStep = 50, colStep = 100;
            int nRows = mGray.rows()/rowStep, nCols = mGray.cols()/colStep;
            // Log.d(TAG, "\nRows: "+nRows+"\nCols: "+nCols+"\n");
            Point points[] = new Point[nRows*nCols];
            for(int i=0; i<nRows; i++){
                for(int j=0; j<nCols; j++){
                    // Grid point at (col = j*colStep, row = i*rowStep).
                    points[i*nCols+j]=new Point(j*colStep, i*rowStep);
                    // Log.d(TAG, "\nRow: "+i*rowStep+"\nCol: "+j*colStep+"\n: ");
                }
            }
            features.fromArray(points);
            prevFeatures.fromList(features.toList());
            mPrevGray = mGray.clone();
            break;
        }
        // Track last frame's points into the current frame.
        nextFeatures.fromArray(prevFeatures.toArray());
        Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);
        List<Point> prevList=features.toList(), nextList=nextFeatures.toList();
        Scalar color = new Scalar(255);
        // Draw a motion line from each grid point to its tracked position.
        for(int i = 0; i<prevList.size(); i++){
            // Core.circle(mGray, prevList.get(i), 5, color);
            Imgproc.line(mGray, prevList.get(i), nextList.get(i), color);
        }
        mPrevGray = mGray.clone();
        break;
    case VIEW_MODE_KLT_TRACKER:
        mGray = inputFrame.gray();
        // No features yet: seed up to 10 Shi-Tomasi corners, defer tracking.
        if(features.toArray().length==0){
            Imgproc.goodFeaturesToTrack(mGray, features, 10, 0.01, 10);
            Log.d(TAG, features.toList().size()+"");
            prevFeatures.fromList(features.toList());
            mPrevGray = mGray.clone();
            // prevFeatures.fromList(nextFeatures.toList());
            break;
        }
        // OpticalFlow(mPrevGray.getNativeObjAddr(), mGray.getNativeObjAddr(), prevFeatures.getNativeObjAddr(), nextFeatures.getNativeObjAddr());
        Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);
        List<Point> drawFeature = nextFeatures.toList();
        // Log.d(TAG, drawFeature.size()+"");
        // Mark each tracked corner with a circle.
        for(int i = 0; i<drawFeature.size(); i++){
            Point p = drawFeature.get(i);
            Imgproc.circle(mGray, p, 5, new Scalar(255));
        }
        // Tracked points become the seeds for the next frame.
        mPrevGray = mGray.clone();
        prevFeatures.fromList(nextFeatures.toList());
        break;
    default:
        // Unknown mode: switch to KLT starting next frame.
        mViewMode = VIEW_MODE_KLT_TRACKER;
    }
    return mGray;
}
Example 8
Source File: ShowCameraViewActivity.java From FaceT with Mozilla Public License 2.0 | 4 votes |
@Override public Mat onCameraFrame(CvCameraViewFrame cvCameraViewFrame) { mRgba = cvCameraViewFrame.rgba(); mGray = cvCameraViewFrame.gray(); if (mAbsoluteFaceSize == 0) { int height = mGray.rows(); if (Math.round(height * mRelativeFaceSize) > 0) { mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize); } } MatOfRect faces = new MatOfRect(); if (mDetectorType == JAVA_DETECTOR) { if (mJavaDetector != null) { mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size()); } } else { Log.e(TAG, "Detection method is not selected!"); } Rect[] facesArray = faces.toArray(); for (int i = 0; i < facesArray.length; i++) { Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3); face_middle_x = (int) facesArray[i].tl().x + facesArray[i].width / 2; face_middle_y = (int) facesArray[i].tl().y + facesArray[i].height / 2; Log.d("face middle : ", face_middle_x + "," + face_middle_y); Log.d(TAG, "faces array " + String.valueOf(i)); } // if(facesArray.length > 0) { // //auto save face when it detects the face // //Thread t = getBaseContext().getMainLooper().getThread(); // Thread t = new Thread(new Runnable() { // @Override // public void run() { // SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss_S"); // String currentDateandTime = sdf.format(new Date()); // String saveDir = Environment.getExternalStorageDirectory().getPath() + "/DCIM/OCV/FDSave"; // File dirCheck = new File(saveDir); // if(!dirCheck.exists()) { // dirCheck.mkdirs(); // } // String fileName = saveDir + "/" + currentDateandTime + ".jpg"; // try { // mOpenCvCameraView.takePicture(fileName); // } catch (Exception ex) { // ex.printStackTrace(); // } // } // }); // t.start(); // } // // if (mIsColorSelected) { // mDetector.process(mRgba); // List<MatOfPoint> contours = mDetector.getContours(); // Log.e(TAG, "Contours count: " + contours.size()); // Imgproc.drawContours(mRgba, 
contours, -1, CONTOUR_COLOR); // // Mat colorLabel = mRgba.submat(4, 68, 4, 68); // colorLabel.setTo(mBlobColorRgba); // // Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols()); // mSpectrum.copyTo(spectrumLabel); // } return mRgba; }
Example 9
Source File: FaceRecognitionActivity.java From AndroidFaceRecognizer with MIT License | 3 votes |
/**
 * Caches the current RGBA and gray frames, then — unless a detection is
 * already running — passes a duplicate of the gray frame to the face
 * detection routine.
 *
 * @param inputFrame the current frame from the OpenCV camera bridge
 * @return the untouched RGBA frame for display
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    boolean detectorIdle = !detectionInProgress;
    if (detectorIdle) {
        // Duplicate the gray frame before handing it off; the in-progress
        // flag suggests detection is asynchronous, so this presumably
        // shields it from camera buffer reuse (TODO confirm).
        Mat grayCopy = new Mat(mGray.rows(), mGray.cols(), mGray.type());
        mGray.copyTo(grayCopy);
        detectFaceOnFrame(grayCopy);
    }

    return mRgba;
}