org.opencv.android.CameraBridgeViewBase Java Examples
The following examples show how to use org.opencv.android.CameraBridgeViewBase.
Each example notes its source file, originating project, and license.
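Most of the examples configure the view in onCreate() and implement onCameraFrame(), but they omit the lifecycle plumbing that CameraBridgeViewBase also needs: the view must be enabled only after the OpenCV native library has loaded, and disabled when the activity pauses or is destroyed. Below is a minimal sketch of that pattern, not taken from any of the projects listed here; the class name, layout, and view id (MinimalCameraActivity, R.layout.activity_camera, R.id.camera_view) are placeholders.

import android.app.Activity;
import android.os.Bundle;
import android.view.SurfaceView;
import android.view.WindowManager;

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;

public class MinimalCameraActivity extends Activity
        implements CameraBridgeViewBase.CvCameraViewListener2 {

    private CameraBridgeViewBase mOpenCvCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_camera);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_view);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Enable the camera view only once the native OpenCV library is loaded.
        if (OpenCVLoader.initDebug()) {
            mOpenCvCameraView.enableView();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null) {
            mOpenCvCameraView.disableView();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null) {
            mOpenCvCameraView.disableView();
        }
    }

    @Override
    public void onCameraViewStarted(int width, int height) { }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        // Pass the frame through unchanged.
        return inputFrame.rgba();
    }
}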
Example #1
Source File: MainActivity.java From MOAAP with MIT License | 7 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Rotating the input frame
    Mat mGray = inputFrame.gray();
    mRgba = inputFrame.rgba();
    if (mIsFrontCamera) {
        Core.flip(mRgba, mRgba, 1);
        Core.flip(mGray, mGray, 1);
    }

    // Detecting face in the frame
    MatOfRect faces = new MatOfRect();
    if (haarCascade != null) {
        haarCascade.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(200, 200), new Size());
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), new Scalar(100), 3);

    return mRgba;
}
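Example #1 uses a haarCascade field (a CascadeClassifier) without showing how it is created. A common approach is to copy the cascade XML out of res/raw into app-private storage and load it by path; the sketch below assumes a hypothetical resource R.raw.haarcascade_frontalface_default and imports for android.content.Context, java.io, and org.opencv.objdetect.CascadeClassifier. The actual MOAAP project may initialize it differently.

// Assumed sketch: load a Haar cascade bundled as a raw resource into a CascadeClassifier.
private CascadeClassifier loadCascade(Context context) {
    try {
        InputStream is = context.getResources().openRawResource(R.raw.haarcascade_frontalface_default);
        File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
        File cascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");

        // CascadeClassifier needs a real file path, so copy the resource to disk first.
        FileOutputStream os = new FileOutputStream(cascadeFile);
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();

        CascadeClassifier cascade = new CascadeClassifier(cascadeFile.getAbsolutePath());
        return cascade.empty() ? null : cascade;
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
}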
Example #2
Source File: MainActivity.java From MOAAP with MIT License | 6 votes |
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_main);

    if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        Log.i("permission", "request CAMERA");
        ActivityCompat.requestPermissions(MainActivity.this,
                new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
    } else {
        Log.i("permission", "CAMERA already granted");
        camera_granted = true;
    }

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.main_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
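The permission request above completes asynchronously. A companion onRequestPermissionsResult callback, not shown in the project excerpt and therefore only an assumed sketch, would typically set camera_granted and enable the camera view once the user responds:

// Assumed sketch: react to the CAMERA permission dialog result.
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == REQUEST_CAMERA) {
        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            camera_granted = true;
            mOpenCvCameraView.enableView();
        } else {
            Log.i("permission", "CAMERA permission denied");
        }
    }
}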
Example #3
Source File: RecognitionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 | 6 votes |
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getProcessedImage(img, PreProcessorFactory.PreprocessingMode.RECOGNITION);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0
            || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i],
                    rec.recognize(images.get(i), ""), front_camera);
        }
        return imgRgba;
    }
}
Example #4
Source File: DetectionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 | 6 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_detection);
    mDetectionView = (CustomCameraView) findViewById(R.id.DetectionView);

    // Use camera which is selected in settings
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    front_camera = sharedPref.getBoolean("key_front_camera", true);
    night_portrait = sharedPref.getBoolean("key_night_portrait", false);
    exposure_compensation = Integer.valueOf(sharedPref.getString("key_exposure_compensation", "20"));

    if (front_camera) {
        mDetectionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
    } else {
        mDetectionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
    }
    mDetectionView.setVisibility(SurfaceView.VISIBLE);
    mDetectionView.setCvCameraViewListener(this);

    int maxCameraViewWidth = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_width", "640"));
    int maxCameraViewHeight = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_height", "480"));
    mDetectionView.setMaxFrameSize(maxCameraViewWidth, maxCameraViewHeight);
}
Example #5
Source File: CameraViewerFragment.java From PixaToon with GNU General Public License v3.0 | 6 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mInputMat = inputFrame.rgba();
    if (mCapturing || mStarting) {
        mOutputMat.setTo(new Scalar(0));
        mCapturing = false;
        mStarting = false;
        return mOutputMat;
    }

    Filter currentFilter = mFilterManager.getCurrentFilter();
    if (currentFilter != null) {
        if (mFilterManager.getFilterScaleFactor() != currentFilter.getDefaultScaleFactor())
            mFilterManager.setFilterScaleFactor(currentFilter.getDefaultScaleFactor());
        currentFilter.process(mInputMat, mOutputMat);
        return mOutputMat;
    }
    return mInputMat;
}
Example #6
Source File: ShowCameraViewActivity.java From FaceT with Mozilla Public License 2.0 | 6 votes |
private void changeCamera() {
    try {
        mOpenCvCameraView.disableView();
        if (usingFront) {
            mOpenCvCameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
            mItemCameraId.setTitle("Back");
        } else {
            mOpenCvCameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
            mItemCameraId.setTitle("Front");
        }
        usingFront = !usingFront;
        mOpenCvCameraView.enableView();
        //onResume();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example #7
Source File: DetectionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 | 6 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getCroppedImage(img);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0
            || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], "", front_camera);
        }
        return imgRgba;
    }
}
Example #8
Source File: GabrielClientActivity.java From faceswap with Apache License 2.0 | 6 votes |
private void init_once() {
    Log.d(DEBUG_TAG, "on init once");
    renderer = new CVRenderer(this);
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.cv_camera_view);
    mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(renderer);

//    cameraOverlay = (CameraOverlay) findViewById(R.id.display_surface);
//    cameraOverlay.bringToFront();
//    mPreview = (CameraPreview) findViewById(R.id.camera_preview);
//    if (Const.DISPLAY_PREVIEW_ONLY) {
//        RelativeLayout.LayoutParams invisibleLayout = new RelativeLayout.LayoutParams(0, 0);
//        mDisplay.setLayoutParams(invisibleLayout);
//        mDisplay.setVisibility(View.INVISIBLE);
//        mDisplay.setZOrderMediaOverlay(false);
//    }
//    mPreview.setPreviewCallback(previewCallback);
//    cameraOverlay.setImageSize(mPreview.imageSize);

    Const.ROOT_DIR.mkdirs();
    Const.LATENCY_DIR.mkdirs();
    hasStarted = true;
}
Example #9
Source File: MainActivity.java From OpenCV-Android-Object-Detection with MIT License | 6 votes |
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.layout);

    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_PERMISSION);
    }

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
    tvName = (TextView) findViewById(R.id.text1);
}
Example #10
Source File: FtcTestGripVision.java From FtcSamples with MIT License | 6 votes |
/**
 * This method is called on every captured camera frame. It releases the previous
 * unconsumed image, if any, and returns a fresh RGBA image.
 *
 * @param inputFrame specifies the captured frame object.
 */
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    //
    // Get rid of the old unconsumed image if any.
    //
    if (image != null) {
        image.release();
        image = null;
    }
    //
    // Get a fresh image.
    //
    image = inputFrame.rgba();

    return image;
}
Example #11
Source File: CameraActivity.java From Android-Car-duino with GNU General Public License v2.0 | 6 votes |
@SuppressWarnings("deprecation") @Override protected void onCreate(Bundle savedInstanceState) { Log.i(TAG, "called onCreate"); super.onCreate(savedInstanceState); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); setContentView(R.layout.camera_activity); mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.CameraView); //mOpenCvCameraView.setMaxFrameSize(240,135); mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE); mOpenCvCameraView.setCvCameraViewListener(this); detector = new GestureDetector(this); debugConsole = (TextView) findViewById(R.id.debugConsole); if(blue.btEnabled) { try { bt.runBT(); } catch (IOException e) { e.printStackTrace(); } } }
Example #12
Source File: MainActivity.java From MOAAP with MIT License | 6 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_main);

    if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        Log.i("permission", "request CAMERA");
        ActivityCompat.requestPermissions(MainActivity.this,
                new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
    } else {
        Log.i("permission", "CAMERA already granted");
        camera_granted = true;
    }

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.java_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #13
Source File: MainActivity.java From opencv-android-sample with MIT License | 6 votes |
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);

    // Load ndk built module, as specified
    // in moduleName in build.gradle
    System.loadLibrary("native");

    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_main);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_view);
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #14
Source File: DetectActivity.java From FaceDetectDemo with Apache License 2.0 | 6 votes |
@Override
public void onClick(View v) {
    switch (v.getId()) {
        case R.id.switch_camera:
            cameraView.disableView();
            if (isFrontCamera) {
                cameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
                isFrontCamera = false;
            } else {
                cameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
                isFrontCamera = true;
            }
            cameraView.enableView();
            break;
        default:
    }
}
Example #15
Source File: FDOpenCVActivity.java From Image-Detection-Samples with Apache License 2.0 | 6 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_face_detection_opencv);
    openCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_surface_view);

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
            && ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        requestPermissions(new String[]{Manifest.permission.CAMERA}, PERMISSIONS_REQUEST_CAMERA);
    } else {
        initPresenter();
        presenter.setCamera(openCvCameraView);
    }
}
Example #16
Source File: ComparisonFrameRender.java From OpenCV-AndroidSamples with MIT License | 6 votes |
@Override
public Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat undistortedFrame = new Mat(inputFrame.rgba().size(), inputFrame.rgba().type());
    Imgproc.undistort(inputFrame.rgba(), undistortedFrame,
            mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());

    Mat comparisonFrame = inputFrame.rgba();
    undistortedFrame.colRange(new Range(0, mWidth / 2)).copyTo(comparisonFrame.colRange(new Range(mWidth / 2, mWidth)));
    List<MatOfPoint> border = new ArrayList<MatOfPoint>();
    final int shift = (int) (mWidth * 0.005);
    border.add(new MatOfPoint(new Point(mWidth / 2 - shift, 0), new Point(mWidth / 2 + shift, 0),
            new Point(mWidth / 2 + shift, mHeight), new Point(mWidth / 2 - shift, mHeight)));
    Imgproc.fillPoly(comparisonFrame, border, new Scalar(255, 255, 255));

    Imgproc.putText(comparisonFrame, mResources.getString(R.string.original),
            new Point(mWidth * 0.1, mHeight * 0.1), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
    Imgproc.putText(comparisonFrame, mResources.getString(R.string.undistorted),
            new Point(mWidth * 0.6, mHeight * 0.1), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));

    return comparisonFrame;
}
Example #17
Source File: Tutorial2Activity.java From OpenCV-AndroidSamples with MIT License | 6 votes |
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_tutorial2);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial2_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #18
Source File: UndistortionFrameRender.java From OpenCV-AndroidSamples with MIT License | 5 votes |
@Override
public Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat renderedFrame = new Mat(inputFrame.rgba().size(), inputFrame.rgba().type());
    Imgproc.undistort(inputFrame.rgba(), renderedFrame,
            mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());

    return renderedFrame;
}
Example #19
Source File: RecognitionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 | 5 votes |
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.recognition_layout);

    progressBar = (ProgressBar) findViewById(R.id.progressBar);

    fh = new FileHelper();
    File folder = new File(fh.getFolderPath());
    if (folder.mkdir() || folder.isDirectory()) {
        Log.i(TAG, "New directory for photos created");
    } else {
        Log.i(TAG, "Photos directory already existing");
    }

    mRecognitionView = (CustomCameraView) findViewById(R.id.RecognitionView);

    // Use camera which is selected in settings
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    front_camera = sharedPref.getBoolean("key_front_camera", true);
    night_portrait = sharedPref.getBoolean("key_night_portrait", false);
    exposure_compensation = Integer.valueOf(sharedPref.getString("key_exposure_compensation", "20"));

    if (front_camera) {
        mRecognitionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
    } else {
        mRecognitionView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_BACK);
    }
    mRecognitionView.setVisibility(SurfaceView.VISIBLE);
    mRecognitionView.setCvCameraViewListener(this);

    int maxCameraViewWidth = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_width", "640"));
    int maxCameraViewHeight = Integer.parseInt(sharedPref.getString("key_maximum_camera_view_height", "480"));
    mRecognitionView.setMaxFrameSize(maxCameraViewWidth, maxCameraViewHeight);
}
Example #20
Source File: CalibrationFrameRender.java From OpenCV-AndroidSamples with MIT License | 5 votes |
@Override
public Mat render(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgbaFrame = inputFrame.rgba();
    Mat grayFrame = inputFrame.gray();
    mCalibrator.processFrame(grayFrame, rgbaFrame);

    return rgbaFrame;
}
Example #21
Source File: VisionOpModeCore.java From FTCVision with MIT License | 5 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    if (!initialized) {
        return inputFrame.rgba();
    }
    // telemetry.addData("Vision Status", "Ready!");
    fps.update();
    return frame(inputFrame.rgba(), inputFrame.gray());
}
Example #22
Source File: ImageManipulationsActivity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_image_manipulations);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.image_manipulations_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #23
Source File: VisionEnabledActivity.java From FTCVision with MIT License | 5 votes |
protected final void initializeVision(int framePreview, TestableVisionOpMode opMode) {
    openCVCamera = (CameraBridgeViewBase) findViewById(framePreview);
    openCVCamera.setVisibility(SurfaceView.VISIBLE);
    openCVCamera.setCvCameraViewListener(this);

    this.opMode = opMode;
    opMode.sensors = new Sensors();
    opMode.fps = new FPS();

    //FIXME this is the line that causes glitchiness
    TestableVisionOpMode.openCVCamera = (JavaCameraView) openCVCamera;
}
Example #24
Source File: Puzzle15Activity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Log.d(TAG, "Creating and setting view");
    mOpenCvCameraView = (CameraBridgeViewBase) new JavaCameraView(this, -1);
    setContentView(mOpenCvCameraView);
    mOpenCvCameraView.setCvCameraViewListener(this);

    mPuzzle15 = new Puzzle15Processor();
    mPuzzle15.prepareNewGame();
}
Example #25
Source File: ColorBlobDetectionActivity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    //requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_color_blob_detection);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.color_blob_detection_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #26
Source File: CameraCalibrationActivity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_camera_calibration);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_calibration_java_surface_view);
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #27
Source File: Tutorial1Activity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_tutorial1);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #28
Source File: FaceDetectionActivity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_face_detection);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Example #29
Source File: CameraActivity.java From AndroidObjectDetection-OpenCV with MIT License | 5 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    if (!permissionGranted) {
        checkPermissions();
    }

    mOpenCvCameraView = findViewById(R.id.CameraView);
    mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);

    classNames = readLabels("labels.txt", this);
    for (int i = 0; i < classNames.size(); i++)
        colors.add(randomColor());
}
Example #30
Source File: FaceFragment.java From OpenCV-android with Apache License 2.0 | 5 votes |
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    // Fix the preview being rotated 90 degrees counter-clockwise
    Core.transpose(mRgba, mRgba);
    Core.flip(mRgba, mRgba, 1);

    Rect[] obj = face.detectObject(mRgba, matOfRect);
    for (Rect rect : obj) {
        Imgproc.rectangle(mRgba, rect.tl(), rect.br(), face.getRectColor(), 3);
    }
    return mRgba;
}