Java Code Examples for org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame#rgba()

The following examples show how to use org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame#rgba(). You can go to the original project or source file by following the links above each example.
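
Before the individual examples, here is a minimal, self-contained sketch of the callback that rgba() is normally used from: onCameraFrame() of a CameraBridgeViewBase.CvCameraViewListener2. The class name PreviewListener and the rectangle overlay are illustrative placeholders, not code taken from any of the projects below.

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

// Illustrative listener; the class name and the drawing code are placeholders.
public class PreviewListener implements CameraBridgeViewBase.CvCameraViewListener2 {

    @Override
    public void onCameraViewStarted(int width, int height) {
        // Mats reused across frames are typically allocated here,
        // once the preview size is known.
    }

    @Override
    public void onCameraViewStopped() {
        // ...and released here to free their native buffers.
    }

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        // rgba() returns the current preview frame as a 4-channel RGBA Mat.
        Mat rgba = inputFrame.rgba();
        // Example processing: draw a rectangle in the centre of the frame.
        Imgproc.rectangle(rgba,
                new Point(rgba.cols() / 2 - 100, rgba.rows() / 2 - 100),
                new Point(rgba.cols() / 2 + 100, rgba.rows() / 2 + 100),
                new Scalar(0, 255, 0, 255), 2);
        return rgba;
    }
}

Every example that follows fills the same onCameraFrame() slot with its own processing; the Mat returned from that method is what the camera view renders.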
Example 1
Source File: ColorBlobDetectionActivity.java    From OpenCV-AndroidSamples with MIT License    7 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();

    if (mIsColorSelected) {
        mDetector.process(mRgba);
        List<MatOfPoint> contours = mDetector.getContours();
        Log.e(TAG, "Contours count: " + contours.size());
        Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);

        Mat colorLabel = mRgba.submat(4, 68, 4, 68);
        colorLabel.setTo(mBlobColorRgba);

        Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
        mSpectrum.copyTo(spectrumLabel);
    }

    return mRgba;
}
 
Example 2
Source File: MainActivity.java    From open-quartz with Apache License 2.0    6 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    switch (MainActivity.viewMode) {
        case MainActivity.VIEW_MODE_RGBA:
            return mRgba;

        case MainActivity.VIEW_MODE_HIST:
            return mRgba;

        case MainActivity.VIEW_MODE_CANNY:
            Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
            Imgproc.cvtColor(mIntermediateMat, mGray, Imgproc.COLOR_GRAY2BGRA, 4);
            return mGray;

        case MainActivity.VIEW_MODE_SOBEL:
            Imgproc.Sobel(mGray, mGray, CvType.CV_8U, 1, 1);
            // Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
            Imgproc.cvtColor(mGray, mGray, Imgproc.COLOR_GRAY2BGRA, 4);
            return mGray;

        case MainActivity.VIEW_MODE_PIXELIZE:
            Imgproc.resize(mGray, mIntermediateMat, mSize0, 0.1, 0.1,
                Imgproc.INTER_NEAREST);
            Imgproc.resize(mIntermediateMat, mRgba, mRgba.size(), 0.0, 0.0,
                Imgproc.INTER_NEAREST);
            return mRgba;

        case MainActivity.VIEW_MODE_GRAY:
            return mGray;

        case MainActivity.VIEW_MODE_FEATURES:
            FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
            return mRgba;

        default:
            return mRgba;
    }
}
 
Example 3
Source File: MainActivity.java    From MOAAP with MIT License    5 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    // Get image size and draw a rectangle on the image for reference
    Mat temp = inputFrame.rgba();
    Imgproc.rectangle(temp, new Point(temp.cols() / 2 - 200, temp.rows() / 2 - 200), new Point(temp.cols() / 2 + 200, temp.rows() / 2 + 200), new Scalar(255, 255, 255), 1);
    Mat digit = temp.submat(temp.rows() / 2 - 180, temp.rows() / 2 + 180, temp.cols() / 2 - 180, temp.cols() / 2 + 180).clone();
    Core.transpose(digit, digit);
    int predict_result = mnist.FindMatch(digit);
    Imgproc.putText(temp, Integer.toString(predict_result), new Point(50, 150), FONT_HERSHEY_SIMPLEX, 3.0, new Scalar(0, 0, 255), 5);

    return temp;
}
 
Example 4
Source File: MonitoringPresenter.java    From go-bees with GNU General Public License v3.0    5 votes
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    if(showAlgoOutput) {
        int numBees = bc.countBees(inputFrame.gray());
        view.setNumBees(numBees);
        bc.getProcessedFrame().copyTo(processedFrame);
        bc.getProcessedFrame().release();
        return processedFrame;
    }
    // If show algorithm output is false -> show original frame
    return inputFrame.rgba();
}
 
Example 5
Source File: MainActivity.java    From Form-N-Fun with MIT License    5 votes
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    if (Build.MODEL.equalsIgnoreCase("Nexus 5X")) // flip the frame on Nexus 5X
        Core.flip(mRgba, mRgba, -1);
    findmazesandballs.apply(mRgba); // process frames
    return mRgba;
}
 
Example 6
Source File: Tutorial2Activity.java    From OpenCV-AndroidSamples with MIT License    5 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    final int viewMode = mViewMode;
    switch (viewMode) {
        case VIEW_MODE_GRAY:
            // input frame has gray scale format
            Imgproc.cvtColor(inputFrame.gray(), mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
            break;
        case VIEW_MODE_RGBA:
            // input frame has RGBA format
            mRgba = inputFrame.rgba();
            break;
        case VIEW_MODE_CANNY:
            // input frame has gray scale format
            mRgba = inputFrame.rgba();
            Imgproc.Canny(inputFrame.gray(), mIntermediateMat, 80, 100);
            Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
            break;
        case VIEW_MODE_FEATURES:
            // input frame has RGBA format
            mRgba = inputFrame.rgba();
            mGray = inputFrame.gray();
            FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
            break;
    }

    return mRgba;
}
 
Example 7
Source File: FaceDetectionActivity.java    From OpenCV-AndroidSamples with MIT License    5 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
        mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    }
    else if (mDetectorType == NATIVE_DETECTOR) {
        if (mNativeDetector != null)
            mNativeDetector.detect(mGray, faces);
    }
    else {
        Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    return mRgba;
}
 
Example 8
Source File: FaceDetectionActivity.java    From AndroidFaceRecognizer with MIT License    5 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (!detectionInProgress) {
        Mat image = new Mat(mGray.rows(), mGray.cols(), mGray.type());
        mGray.copyTo(image);
        detectFaceOnFrame(image);
    }

    return mRgba;
}
 
Example 9
Source File: FdActivity.java    From open-quartz with Apache License 2.0    5 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
    }

    MatOfRect faces = new MatOfRect();

    if (mJavaDetector != null) {
        mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2,
            // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    }

    // Draw rectangles
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
        Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
    }

    return mRgba;
}
 
Example 10
Source File: CameraActivity.java    From AndroidObjectDetection-OpenCV with MIT License    4 votes
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    Mat frame = inputFrame.rgba();
    Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGBA2RGB);
    Size frame_size = new Size(416, 416);
    Scalar mean = new Scalar(127.5);

    Mat blob = Dnn.blobFromImage(frame, 1.0 / 255.0, frame_size, mean, true, false);
    //save_mat(blob);
    net.setInput(blob);

    List<Mat> result = new ArrayList<>();
    List<String> outBlobNames = net.getUnconnectedOutLayersNames();

    net.forward(result, outBlobNames);
    float confThreshold = 0.5f;

    for (int i = 0; i < result.size(); ++i) {
        // each row is a candidate detection: the first 4 numbers are
        // [center_x, center_y, width, height], followed by an objectness score and the class probabilities
        Mat level = result.get(i);
        for (int j = 0; j < level.rows(); ++j) {
            Mat row = level.row(j);
            Mat scores = row.colRange(5, level.cols());
            Core.MinMaxLocResult mm = Core.minMaxLoc(scores);
            float confidence = (float) mm.maxVal;
            Point classIdPoint = mm.maxLoc;
            if (confidence > confThreshold) {

                int centerX = (int) (row.get(0, 0)[0] * frame.cols());
                int centerY = (int) (row.get(0, 1)[0] * frame.rows());
                int width = (int) (row.get(0, 2)[0] * frame.cols());
                int height = (int) (row.get(0, 3)[0] * frame.rows());

                int left = (int) (centerX - width * 0.5);
                int top = (int) (centerY - height * 0.5);
                int right = (int) (centerX + width * 0.5);
                int bottom = (int) (centerY + height * 0.5);

                Point left_top = new Point(left, top);
                Point right_bottom = new Point(right, bottom);
                Point label_left_top = new Point(left, top - 5);
                DecimalFormat df = new DecimalFormat("#.##");

                int class_id = (int) classIdPoint.x;
                String label = classNames.get(class_id) + ": " + df.format(confidence);
                Scalar color = colors.get(class_id);

                Imgproc.rectangle(frame, left_top, right_bottom, color, 3, 2);
                Imgproc.putText(frame, label, label_left_top, Imgproc.FONT_HERSHEY_SIMPLEX, 1, new Scalar(0, 0, 0), 4);
                Imgproc.putText(frame, label, label_left_top, Imgproc.FONT_HERSHEY_SIMPLEX, 1, new Scalar(255, 255, 255), 2);
            }
        }
    }
    return frame;
}
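
Example 10 draws every candidate whose best class score clears confThreshold, so overlapping boxes around the same object are all kept. A common extra step is non-maximum suppression. The helper below is only a sketch of that step, assuming the MatOfRect2d overload of Dnn.NMSBoxes from OpenCV's Java bindings (3.4 and later); the class, method, and parameter names are illustrative and not part of the AndroidObjectDetection-OpenCV project.

import java.util.List;

import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfRect2d;
import org.opencv.core.Point;
import org.opencv.core.Rect2d;
import org.opencv.core.Scalar;
import org.opencv.dnn.Dnn;
import org.opencv.imgproc.Imgproc;

// Illustrative helper: draw only the boxes that survive non-maximum suppression.
public final class NmsHelper {

    private NmsHelper() {
    }

    public static void drawAfterNms(Mat frame, List<Rect2d> boxes, List<Float> confidences,
                                    float confThreshold, float nmsThreshold, Scalar color) {
        if (boxes.isEmpty()) {
            return;
        }
        MatOfRect2d boxesMat = new MatOfRect2d();
        boxesMat.fromList(boxes);
        MatOfFloat confMat = new MatOfFloat();
        confMat.fromList(confidences);
        MatOfInt kept = new MatOfInt();
        // Drop boxes that overlap a higher-scoring box by more than nmsThreshold.
        Dnn.NMSBoxes(boxesMat, confMat, confThreshold, nmsThreshold, kept);
        for (int idx : kept.toArray()) {
            Rect2d box = boxes.get(idx);
            Imgproc.rectangle(frame, new Point(box.x, box.y),
                    new Point(box.x + box.width, box.y + box.height), color, 3);
        }
    }
}

To use it, the detection loop above would collect new Rect2d(left, top, width, height) and the matching confidence into two lists instead of drawing immediately, then call drawAfterNms(frame, boxes, confidences, confThreshold, 0.4f, color) once after the loops.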
 
Example 11
Source File: ShowCameraViewActivity.java    From FaceT with Mozilla Public License 2.0    4 votes
@Override
public Mat onCameraFrame(CvCameraViewFrame cvCameraViewFrame) {
    mRgba = cvCameraViewFrame.rgba();
    mGray = cvCameraViewFrame.gray();

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null) {
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        }
    } else {
        Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        face_middle_x = (int) facesArray[i].tl().x + facesArray[i].width / 2;
        face_middle_y = (int) facesArray[i].tl().y + facesArray[i].height / 2;
        Log.d("face middle : ", face_middle_x + "," + face_middle_y);
        Log.d(TAG, "faces array " + String.valueOf(i));
    }
//        if(facesArray.length > 0) {
//            //auto save face when it detects the face
//            //Thread t = getBaseContext().getMainLooper().getThread();
//            Thread t = new Thread(new Runnable() {
//                @Override
//                public void run() {
//                    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss_S");
//                    String currentDateandTime = sdf.format(new Date());
//                    String saveDir = Environment.getExternalStorageDirectory().getPath() + "/DCIM/OCV/FDSave";
//                    File dirCheck = new File(saveDir);
//                    if(!dirCheck.exists()) {
//                        dirCheck.mkdirs();
//                    }
//                    String fileName = saveDir + "/" + currentDateandTime + ".jpg";
//                    try {
//                        mOpenCvCameraView.takePicture(fileName);
//                    } catch (Exception ex) {
//                        ex.printStackTrace();
//                    }
//                }
//            });
//            t.start();
//        }
//
//        if (mIsColorSelected) {
//            mDetector.process(mRgba);
//            List<MatOfPoint> contours = mDetector.getContours();
//            Log.e(TAG, "Contours count: " + contours.size());
//            Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
//
//            Mat colorLabel = mRgba.submat(4, 68, 4, 68);
//            colorLabel.setTo(mBlobColorRgba);
//
//            Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
//            mSpectrum.copyTo(spectrumLabel);
//        }

    return mRgba;
}
 
Example 12
Source File: ResistorImageProcessor.java    From ResistorScanner with MIT License    4 votes
public Mat processFrame(CvCameraViewFrame frame)
{
    Mat imageMat = frame.rgba();
    int cols = imageMat.cols();
    int rows = imageMat.rows();

    Mat subMat = imageMat.submat(rows/2, rows/2+30, cols/2 - 50, cols/2 + 50);
    Mat filteredMat = new Mat();
    Imgproc.cvtColor(subMat, subMat, Imgproc.COLOR_RGBA2BGR);
    Imgproc.bilateralFilter(subMat, filteredMat, 5, 80, 80);
    Imgproc.cvtColor(filteredMat, filteredMat, Imgproc.COLOR_BGR2HSV);

    findLocations(filteredMat);

    if(_locationValues.size() >= 3)
    {
        // recover the resistor value by iterating through the centroid locations
        // in an ascending manner and using their associated colour values
        int k_tens = _locationValues.keyAt(0);
        int k_units = _locationValues.keyAt(1);
        int k_power = _locationValues.keyAt(2);

        int value = 10*_locationValues.get(k_tens) + _locationValues.get(k_units);
        value *= Math.pow(10, _locationValues.get(k_power));

        String valueStr;
        if(value >= 1e3 && value < 1e6)
            valueStr = String.valueOf(value/1e3) + " KOhm";
        else if(value >= 1e6)
            valueStr = String.valueOf(value/1e6) + " MOhm";
        else
            valueStr = String.valueOf(value) + " Ohm";

        if(value <= 1e9)
            Core.putText(imageMat, valueStr, new Point(10, 100), Core.FONT_HERSHEY_COMPLEX,
                         2, new Scalar(255, 0, 0, 255), 3);
    }

    Scalar color = new Scalar(255, 0, 0, 255);
    Core.line(imageMat, new Point(cols/2 - 50, rows/2), new Point(cols/2 + 50, rows/2 ), color, 2);
    return imageMat;
}
 
Example 13
Source File: Tutorial1Activity.java    From OpenCV-AndroidSamples with MIT License    4 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return inputFrame.rgba();
}
 
Example 14
Source File: Tutorial3Activity.java    From OpenCV-AndroidSamples with MIT License    4 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return inputFrame.rgba();
}
 
Example 15
Source File: AAVActivity.java    From AAV with GNU General Public License v2.0    4 votes
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	synchronized (inputFrame) {

		_rgbaImage = inputFrame.rgba();

		if (android.os.Build.MODEL.equalsIgnoreCase("Nexus 5X")) {
			Core.flip(_rgbaImage, _rgbaImage, -1);
		}

		double current_contour;

		// In contrast to the C++ interface, Android API captures images in the RGBA format.
		// Also, in HSV space, only the hue determines which color it is. Saturation determines
		// how 'white' the color is, and Value determines how 'dark' the color is.
		Imgproc.cvtColor(_rgbaImage, _hsvMat, Imgproc.COLOR_RGB2HSV_FULL);

		Core.inRange(_hsvMat, _lowerThreshold, _upperThreshold, _processedMat);

		// Imgproc.dilate(_processedMat, _dilatedMat, new Mat());
		Imgproc.erode(_processedMat, _dilatedMat, new Mat());
		Imgproc.findContours(_dilatedMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
		MatOfPoint2f points = new MatOfPoint2f();
		_contourArea = 7;
		for (int i = 0, n = contours.size(); i < n; i++) {
			current_contour = Imgproc.contourArea(contours.get(i));
			if (current_contour > _contourArea) {
				_contourArea = current_contour;
				contours.get(i).convertTo(points, CvType.CV_32FC2); // contours.get(x) is a single MatOfPoint, but to use minEnclosingCircle we need to pass a MatOfPoint2f so we need to do a
				// conversion
			}
		}
		if (!points.empty() && _contourArea > MIN_CONTOUR_AREA) {
			Imgproc.minEnclosingCircle(points, _centerPoint, null);
			// Core.circle(_rgbaImage, _centerPoint, 3, new Scalar(255, 0, 0), Core.FILLED);
			if (_showContourEnable)
				Core.circle(_rgbaImage, _centerPoint, (int) Math.round(Math.sqrt(_contourArea / Math.PI)), new Scalar(255, 0, 0), 3, 8, 0);// Core.FILLED);
		}
		contours.clear();
	}
	return _rgbaImage;
}
 
Example 16
Source File: FaceRecognitionActivity.java    From AndroidFaceRecognizer with MIT License    3 votes
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (!detectionInProgress) {
        Mat image = new Mat(mGray.rows(), mGray.cols(), mGray.type());
        mGray.copyTo(image);
        detectFaceOnFrame(image);
    }

    return mRgba;
}