Java Code Examples for org.opencv.android.CameraBridgeViewBase#CvCameraViewFrame
The following examples show how to use
org.opencv.android.CameraBridgeViewBase#CvCameraViewFrame.
Each example is taken from an open-source project; the source file, originating project, and license are noted above the code.
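All of the onCameraFrame implementations below assume an activity that registers itself as a CvCameraViewListener2 on a CameraBridgeViewBase, so frames arrive only after enableView() is called. As a reminder of that context, here is a minimal sketch of the surrounding wiring; the layout and view IDs are hypothetical, and OpenCVLoader.initDebug() stands in for whatever initialization scheme each individual project actually uses.

import android.app.Activity;
import android.os.Bundle;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;

public class CameraActivity extends Activity
        implements CameraBridgeViewBase.CvCameraViewListener2 {

    private CameraBridgeViewBase mOpenCvCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);                    // hypothetical layout
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_view);
        mOpenCvCameraView.setCvCameraViewListener(this);
        // Camera permission handling omitted for brevity.
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (OpenCVLoader.initDebug()) {
            mOpenCvCameraView.enableView();                          // frames start arriving
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null) mOpenCvCameraView.disableView();
    }

    @Override
    public void onCameraViewStarted(int width, int height) { }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        return inputFrame.rgba();   // the examples below replace this body
    }
}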
Example 1
Source File: MainActivity.java From MOAAP with MIT License
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Mirror the input frame when the front camera is active
    Mat mGray = inputFrame.gray();
    mRgba = inputFrame.rgba();
    if (mIsFrontCamera) {
        Core.flip(mRgba, mRgba, 1);
        Core.flip(mGray, mGray, 1);
    }

    // Detect faces in the frame
    MatOfRect faces = new MatOfRect();
    if (haarCascade != null) {
        haarCascade.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(200, 200), new Size());
    }
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), new Scalar(100), 3);
    return mRgba;
}
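Example 1 relies on a haarCascade field that has already been initialized; that setup is not part of the snippet. The usual pattern from the OpenCV Android samples, sketched below with an assumed res/raw resource name, copies the cascade XML out of the APK so CascadeClassifier can open it from a real file path.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import android.content.Context;
import android.util.Log;
import org.opencv.objdetect.CascadeClassifier;

final class CascadeLoader {
    // A minimal sketch; the resource and file names passed in are assumptions.
    static CascadeClassifier load(Context context, int rawResId, String fileName) {
        try {
            // Copy the cascade out of the APK into app-private storage.
            InputStream is = context.getResources().openRawResource(rawResId);
            File cascadeFile = new File(context.getDir("cascade", Context.MODE_PRIVATE), fileName);
            FileOutputStream os = new FileOutputStream(cascadeFile);
            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();

            CascadeClassifier cascade = new CascadeClassifier(cascadeFile.getAbsolutePath());
            return cascade.empty() ? null : cascade;   // empty() signals a failed load
        } catch (IOException e) {
            Log.e("CascadeLoader", "Failed to extract cascade file", e);
            return null;
        }
    }
}

A call such as haarCascade = CascadeLoader.load(this, R.raw.lbpcascade_frontalface, "lbpcascade_frontalface.xml") in onCreate() or the loader callback would then populate the field before frames arrive.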
Example 2
Source File: ComparisonFrameRender.java From OpenCV-AndroidSamples with MIT License
@Override
public Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat undistortedFrame = new Mat(inputFrame.rgba().size(), inputFrame.rgba().type());
    Imgproc.undistort(inputFrame.rgba(), undistortedFrame,
            mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());

    Mat comparisonFrame = inputFrame.rgba();
    undistortedFrame.colRange(new Range(0, mWidth / 2)).copyTo(comparisonFrame.colRange(new Range(mWidth / 2, mWidth)));
    List<MatOfPoint> border = new ArrayList<MatOfPoint>();
    final int shift = (int) (mWidth * 0.005);
    border.add(new MatOfPoint(new Point(mWidth / 2 - shift, 0), new Point(mWidth / 2 + shift, 0),
            new Point(mWidth / 2 + shift, mHeight), new Point(mWidth / 2 - shift, mHeight)));
    Imgproc.fillPoly(comparisonFrame, border, new Scalar(255, 255, 255));

    Imgproc.putText(comparisonFrame, mResources.getString(R.string.original), new Point(mWidth * 0.1, mHeight * 0.1),
            Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
    Imgproc.putText(comparisonFrame, mResources.getString(R.string.undistorted), new Point(mWidth * 0.6, mHeight * 0.1),
            Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));

    return comparisonFrame;
}
Example 3
Source File: FtcTestGripVision.java From FtcSamples with MIT License
/**
 * This method is called on every captured camera frame. It releases the previously
 * captured image, if any, and keeps a reference to the fresh frame.
 *
 * @param inputFrame specifies the captured frame object.
 */
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame)
{
    //
    // Get rid of the old unconsumed image if any.
    //
    if (image != null)
    {
        image.release();
        image = null;
    }
    //
    // Get a fresh image.
    //
    image = inputFrame.rgba();

    return image;
}
Example 4
Source File: DetectionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getCroppedImage(img);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], "", front_camera);
        }
        return imgRgba;
    }
}
Example 5
Source File: RecognitionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getProcessedImage(img, PreProcessorFactory.PreprocessingMode.RECOGNITION);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], rec.recognize(images.get(i), ""), front_camera);
        }
        return imgRgba;
    }
}
Example 6
Source File: OpenCvCamera.java From opencv-documentscanner-android with Apache License 2.0
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // input frame has RGBA format
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    doWithMat(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
    return mRgba;
}
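In this example the frame is handed to native C++ code: getNativeObjAddr() exposes the address of the underlying cv::Mat, and doWithMat passes both addresses across JNI. The project's own declaration is not shown above; a minimal sketch of what the call implies follows, where the class and library names are assumptions and must match the project's native build target.

public final class NativeBridge {
    static {
        // Assumed library name; it has to match the CMake/ndk-build target.
        System.loadLibrary("documentscanner");
    }

    // Receives the values of Mat.getNativeObjAddr(). On the C++ side each
    // long is cast back to a cv::Mat* and the pixels are modified in place,
    // so the Java-side mRgba already holds the result when this returns.
    public static native void doWithMat(long grayAddr, long rgbaAddr);
}

Because the native code works on the same pixel buffer, no copy crosses the JNI boundary; only two longs do.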
Example 7
Source File: DetectActivity.java From FaceDetectDemo with Apache License 2.0
@Override
// Face-detection logic, implemented after the sample OpenCV provides (face-detection)
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    // Flip the matrices to match the front or back camera orientation
    if (isFrontCamera) {
        Core.flip(mRgba, mRgba, 1);
        Core.flip(mGray, mGray, 1);
    } else {
        Core.flip(mRgba, mRgba, -1);
        Core.flip(mGray, mGray, -1);
    }
    float mRelativeFaceSize = 0.2f;
    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
    }
    MatOfRect faces = new MatOfRect();
    if (classifier != null)
        classifier.detectMultiScale(mGray, faces, 1.1, 2, 2,
                new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    Rect[] facesArray = faces.toArray();
    Scalar faceRectColor = new Scalar(0, 255, 0, 255);
    for (Rect faceRect : facesArray)
        Imgproc.rectangle(mRgba, faceRect.tl(), faceRect.br(), faceRectColor, 3);
    return mRgba;
}
Example 8
Source File: UndistortionFrameRender.java From OpenCV-AndroidSamples with MIT License
@Override
public Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat renderedFrame = new Mat(inputFrame.rgba().size(), inputFrame.rgba().type());
    Imgproc.undistort(inputFrame.rgba(), renderedFrame,
            mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());

    return renderedFrame;
}
Example 9
Source File: AuthenticationActivity.java From ml-authentication with Apache License 2.0
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();

    // Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
    // M.Schälchli 20170129
    // if (isDeviceRooted){
    //     DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
    // }

    long currentTime = new Date().getTime();

    if ((!tensorFlowLoadingThread.isAlive()) && ((startTimeAuthenticationAnimation + AUTHENTICATION_ANIMATION_TIME) < currentTime)) {
        prepareForAuthentication();

        if (!recognitionThread.isAlive() && recognitionThreadStarted) {
            List<Student> students = recognitionThread.getRecognizedStudent();
            Student student = new Student();
            if (students.size() == 1) {
                student = students.get(0);
            }
            numberOfTries++;
            Log.i(getClass().getName(), "Number of authentication/recognition tries: " + numberOfTries);
            if ((student != null) && (students.size() == 1)) {
                AuthenticationHelper.updateCurrentStudent(student, getApplicationContext(), false);
                finish();
            } else if (numberOfTries >= NUMBER_OF_MAXIMUM_TRIES) {
                startStudentImageCollectionActivity(true);
            }
            recognitionThreadStarted = false;
        }

        Mat imgCopy = new Mat();
        // Store original image for face recognition
        imgRgba.copyTo(imgCopy);

        // Mirror front camera image
        Core.flip(imgRgba, imgRgba, 1);

        Rect face = new Rect();
        boolean isFaceInsideFrame = false;
        boolean faceDetected = false;

        List<Mat> images = ppF.getCroppedImage(imgCopy);
        if (images != null && images.size() == 1) {
            Mat img = images.get(0);
            if (img != null) {
                Rect[] faces = ppF.getFacesForRecognition();
                if (faces != null && faces.length == 1) {
                    faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                    face = faces[0];
                    faceDetected = true;
                    // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                    startTimeFallback = currentTime;
                    isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);

                    if (isFaceInsideFrame) {
                        if (!recognitionThread.isAlive() && !recognitionThreadStarted) {
                            if (!activityStopped) {
                                mediaPlayerAnimalSound.start();

                                recognitionThread = new RecognitionThread(tensorFlow, studentImageCollectionEventDao);
                                recognitionThread.setImg(img);
                                recognitionThread.start();
                                recognitionThreadStarted = true;
                            }
                        }
                    }
                }
            }
        }

        if (faceDetected && !isFaceInsideFrame && !activityStopped) {
            DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
            AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
        }

        if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)) {
            // Prevent from second execution of fallback activity because of threading
            startTimeFallback = currentTime;
            DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
            finish();
        }

        EnvironmentSettings.freeMemory();
    }
    return imgRgba;
}
Example 10
Source File: MainActivity.java From opencv-android-sample with MIT License
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // return performFindFeatures(inputFrame);
    return performIncreaseContrast(inputFrame);
}
Example 11
Source File: PreviewFrameRender.java From OpenCV-AndroidSamples with MIT License
@Override
public Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    return inputFrame.rgba();
}
Example 12
Source File: VisionEnabledActivity.java From FTCVision with MIT License
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    opMode.loop();
    opMode.fps.update();
    return opMode.frame(inputFrame.rgba(), inputFrame.gray());
}
Example 13
Source File: CVRenderer.java From faceswap with Apache License 2.0
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Imgproc.cvtColor(inputFrame.rgba(), displayFrame, Imgproc.COLOR_BGRA2BGR);
    if (this.delegate != null) {
        //TODO: needed so that bg encoding won't interfere with foreground getting a new image
        displayFrame.copyTo(transmitFrame);
        delegate.onCVPreviewAvailable(transmitFrame);
    }
    synchronized (this.faces) {
        for (Face face : faces) {
            int[] roi = face.realRoi;
            boundryCheck(roi, displayFrame.width(), displayFrame.height());
            Point leftTop = new Point(roi[0], roi[1]);
            Point rightBottom = new Point(roi[2], roi[3]);
            if (face.isRenderring && face.argbMat != null) {
                rightBottom = new Point(roi[0] + face.argbMat.width(), roi[1] + face.argbMat.height());
                Rect pRoi = new Rect(roi[0], roi[1], face.argbMat.width(), face.argbMat.height());
                Log.d("debug", "pRoi : " + pRoi.toString());
                Log.d("debug", "display frame width : " + displayFrame.width() + " display frame height: " + displayFrame.height());
                Mat pRoiMat = displayFrame.submat(pRoi);
                Log.d("debug", "display frame width : " + displayFrame.width() + " display frame height: " + displayFrame.height());
                face.argbMat.copyTo(pRoiMat);
            }
            Imgproc.rectangle(displayFrame, leftTop, rightBottom, new Scalar(255, 0, 0));
            Imgproc.putText(displayFrame, face.getName(), new Point(roi[0], roi[1]), 0, 0.8, new Scalar(255, 255, 0));
        }
    }
    Log.d(LOG_TAG, "rendered");
    return displayFrame;
    // Mat pRoi = mRgba.submat(10, 10 + replaceImg.width(), 10, 10 + replaceImg.height());
    // Log.d("debug", "mat width: " + pRoi.width() + " height: " + pRoi.height());
    // Log.d("debug", "img width: " + replaceImg.width() + " height: " + replaceImg.height());
    // replaceImg.copyTo(pRoi);
}
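The snippet calls a boundryCheck helper that is not shown, and the project's actual implementation is unknown. A plausible sketch, assuming the roi array holds {left, top, right, bottom} as the surrounding code suggests, would clamp the corners into the frame so submat() cannot throw:

// Hypothetical helper, not the project's code: clamp an ROI into the frame.
private static void boundryCheck(int[] roi, int width, int height) {
    roi[0] = Math.max(0, Math.min(roi[0], width - 1));   // left
    roi[1] = Math.max(0, Math.min(roi[1], height - 1));  // top
    roi[2] = Math.max(roi[0], Math.min(roi[2], width));  // right
    roi[3] = Math.max(roi[1], Math.min(roi[3], height)); // bottom
}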
Example 14
Source File: StudentImageCollectionActivity.java From ml-authentication with Apache License 2.0
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    final Mat imgRgba = inputFrame.rgba();

    // Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
    // M.Schälchli 20170129
    // if (isDeviceRooted){
    //     DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
    // }

    long currentTime = new Date().getTime();

    if (authenticationAnimationAlreadyPlayed || ((startTimeAuthenticationAnimation + AuthenticationActivity.AUTHENTICATION_ANIMATION_TIME) < currentTime)) {
        prepareForAuthentication();

        Mat imgCopy = new Mat();
        // Store original image for face recognition
        imgRgba.copyTo(imgCopy);

        // Mirror front camera image
        Core.flip(imgRgba, imgRgba, 1);

        Rect face = new Rect();
        boolean isFaceInsideFrame = false;
        boolean faceDetected = false;

        if ((lastTime + TIMER_DIFF) < currentTime) {
            lastTime = currentTime;

            List<Mat> images = ppF.getCroppedImage(imgCopy);
            if ((images != null) && (images.size() == 1)) {
                Mat img = images.get(0);
                if (img != null) {
                    Rect[] faces = ppF.getFacesForRecognition();
                    if ((faces != null) && (faces.length == 1)) {
                        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                        face = faces[0];
                        faceDetected = true;
                        // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                        startTimeFallback = currentTime;
                        isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);

                        if (isFaceInsideFrame) {
                            if (!activityStopped) {
                                mediaPlayerAnimalSound.start();

                                studentImages.add(img);

                                // Stop after NUMBER_OF_IMAGES (settings option)
                                if (imagesProcessed == NUMBER_OF_IMAGES) {
                                    storeStudentImages();
                                    finish();
                                }
                                imagesProcessed++;
                            }
                        }
                    }
                }
            }
        }

        if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)) {
            // Prevent from second execution of fallback activity because of threading
            startTimeFallback = currentTime;
            DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
            finish();
        }

        if (faceDetected && !isFaceInsideFrame && !activityStopped) {
            DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
            AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
        }

        EnvironmentSettings.freeMemory();
    }
    return imgRgba;
}
Example 15
Source File: FtcTestOpenCv.java From FtcSamples with MIT License
/**
 * This method is called on every captured camera frame. It will do face detection on the
 * captured frame.
 *
 * @param inputFrame specifies the captured frame object.
 */
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame)
{
    final String funcName = "onCameraFrame";
    //
    // Subject the captured frame to face detection. The face detector produces an array
    // of rectangles representing faces detected.
    //
    if (doColor)
    {
        image = inputFrame.rgba();
    }
    else
    {
        image = inputFrame.gray();
        doOverlayImage = false;
    }
    // rotateImage(image, image, 90.0);

    long startTime = System.currentTimeMillis();
    faceDetector.detectMultiScale(image, faceRects);
    long elapsedTime = System.currentTimeMillis() - startTime;
    if (perfCheckEnabled)
    {
        totalProcessingTime += elapsedTime;
        framesProcessed++;
    }
    //
    // We may want to overlay a circle or rectangle on each detected face or
    // we can overlay a fun image onto a detected face. Play with the code in
    // this for-loop and let your imagination run wild.
    //
    Rect[] rects = faceRects.toArray();
    int maxArea = 0;
    int maxIndex = -1;
    //
    // Draw rectangles on faces found and find the largest face.
    //
    for (int i = 0; i < rects.length; i++)
    {
        //
        // Overlay a rectangle on the detected faces.
        //
        if (overlayRectangle)
        {
            Imgproc.rectangle(image, rects[i].tl(), rects[i].br(), FACE_RECT_COLOR, 3);
        }
        //
        // Find the largest detected face.
        //
        if (doOverlayImage)
        {
            int area = rects[i].width * rects[i].height;
            if (area > maxArea)
            {
                maxArea = area;
                maxIndex = i;
            }
        }
    }
    //
    // Overlay an image only on the largest detected face.
    //
    if (doOverlayImage && maxIndex != -1)
    {
        //
        // Scale the fun image to the same size as the face.
        //
        Mat scaledOverlay = new Mat();
        Imgproc.resize(overlayImage, scaledOverlay, rects[maxIndex].size());
        //
        // Overlay the scaled image to the camera image.
        //
        combineImage(image, scaledOverlay, rects[maxIndex].x, rects[maxIndex].y);
    }

    return image;
}
Example 16
Source File: MainActivity.java From opencv-android-sample with MIT License
protected Mat performIncreaseContrast(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    IncreaseContrast(mRgba.getNativeObjAddr());
    return mRgba;
}
Example 17
Source File: MainActivity.java From pasm-yolov3-Android with GNU General Public License v3.0
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    return inputFrame.rgba();
}
Example 18
Source File: FrameRender.java From OpenCV-AndroidSamples with MIT License
public abstract Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame);