androidx.camera.core.ImageProxy Java Examples
The following examples show how to use androidx.camera.core.ImageProxy.
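Before the examples, here is a minimal sketch of how an ImageProxy usually reaches application code in the current CameraX 1.x API: it is handed to an ImageAnalysis.Analyzer and must be closed once analysis is finished. The context reference and the use-case binding are assumed to exist elsewhere; this is an illustration, not part of any example below.

    ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
            .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
            .build();

    imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(context), image -> {
        // ImageProxy exposes size, format, and rotation metadata for the frame.
        int rotationDegrees = image.getImageInfo().getRotationDegrees();
        Log.d("ImageProxyDemo", image.getWidth() + "x" + image.getHeight() + " rotated " + rotationDegrees);
        image.close(); // releasing the frame lets CameraX deliver the next one
    });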
Example #1
Source File: CameraXUtil.java, from mollyim-android (GNU General Public License v3.0)
@RequiresApi(21)
private static boolean shouldCropImage(@NonNull ImageProxy image) {
  Size sourceSize = new Size(image.getWidth(), image.getHeight());
  Size targetSize = new Size(image.getCropRect().width(), image.getCropRect().height());

  return !targetSize.equals(sourceSize);
}
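The helper only decides whether cropping is needed. As a hedged companion sketch (the cropToRect name and the Bitmap-based approach are assumptions, not part of the Molly source), the crop rect could be applied to an already-decoded Bitmap like this:

    // Hypothetical companion to shouldCropImage(): applies ImageProxy.getCropRect()
    // to a decoded Bitmap. Not part of the original source file.
    @RequiresApi(21)
    private static Bitmap cropToRect(@NonNull Bitmap source, @NonNull ImageProxy image) {
      Rect crop = image.getCropRect();
      // Bitmap.createBitmap takes (source, x, y, width, height).
      return Bitmap.createBitmap(source, crop.left, crop.top, crop.width(), crop.height());
    }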
Example #2
Source File: CameraAnalyzer.java, from LPR (Apache License 2.0)
@Override
public void analyze(@NonNull ImageProxy image) {
    if (previewHandler != null) {
        Mat mat = imageToMat(image);
        if (mat != null) {
            if (prAddress == 0) {
                // Native plate-recognition engine not ready yet; skip this frame.
                prAddress = scannerView.getPRAddress();
                image.close();
                return;
            }
            String res = PlateRecognition.SimpleRecognization(mat.getNativeObjAddr(), prAddress);
            Message message;
            if (!"".equals(res)) {
                message = Message.obtain(previewHandler, Scanner.OCR_SUCCEEDED, res);
                previewHandler = null;
            } else {
                message = Message.obtain(previewHandler, Scanner.OCR_FAILED);
            }
            message.sendToTarget();
        } else {
            Log.d("analyze", "Mat is null");
        }
    } else {
        Log.d(TAG, "previewHandler is null");
    }

    // Throttle analysis to roughly 10 fps, then release the frame exactly once.
    try {
        Thread.sleep(100);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    image.close();
}
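How this analyzer gets attached is not shown in the snippet. A plausible wiring against the CameraX 1.x API (the constructor arguments and executor choice are assumptions, not taken from the LPR project) would look like:

    ImageAnalysis analysis = new ImageAnalysis.Builder()
            .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
            .build();

    // Run recognition off the main thread; CameraAnalyzer's constructor arguments are project-specific.
    analysis.setAnalyzer(java.util.concurrent.Executors.newSingleThreadExecutor(), new CameraAnalyzer(/* ... */));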
Example #3
Source File: CameraAnalyzer.java, from LPR (Apache License 2.0)
private Mat imageToMat(ImageProxy imageProxy) {
    ImageProxy.PlaneProxy[] planes = imageProxy.getPlanes();

    ByteBuffer yBuffer = planes[0].getBuffer(); // Y
    ByteBuffer uBuffer = planes[1].getBuffer(); // U
    ByteBuffer vBuffer = planes[2].getBuffer(); // V

    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();

    byte[] nv21 = new byte[ySize + uSize + vSize];

    // U and V are swapped: NV21 expects the chroma samples ordered as VU.
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);

    try {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, imageProxy.getWidth(), imageProxy.getHeight(), null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream(nv21.length);
        yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 90, stream);
        Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());

        Matrix matrix = new Matrix();
        matrix.postRotate(90);

        // The framing rect is given in display (rotated) coordinates, so x/y and
        // width/height are swapped relative to the not-yet-rotated bitmap.
        Rect rect = scannerView.getFramingRectInPreview(bitmap.getWidth(), bitmap.getHeight());
        bitmap = Bitmap.createBitmap(bitmap, rect.top, rect.left, rect.height(), rect.width(), matrix, true);
        stream.close();

        Mat mat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4); // Mat takes (rows, cols, type)
        Utils.bitmapToMat(bitmap, mat);
        return mat;
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
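Note that the plain buffer concatenation above assumes the Y, U and V planes are tightly packed; devices that pad rows report a larger row stride via PlaneProxy.getRowStride(). A stride-aware copy of the Y plane (a sketch under that assumption; the chroma planes would need the same treatment using their pixel strides) could look like this:

    // Sketch only: copy the Y plane row by row, honouring the row stride, instead of
    // assuming the plane buffer is exactly width * height bytes.
    private static void copyYPlane(ImageProxy.PlaneProxy yPlane, int width, int height, byte[] nv21) {
        ByteBuffer buffer = yPlane.getBuffer();
        int rowStride = yPlane.getRowStride();
        if (rowStride == width) {
            buffer.get(nv21, 0, width * height); // no padding: single bulk copy
        } else {
            for (int row = 0; row < height; row++) {
                buffer.position(row * rowStride);
                buffer.get(nv21, row * width, width);
            }
        }
    }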
Example #4
Source File: CameraXFragment.java, from mollyim-android (GNU General Public License v3.0)
private void onCaptureClicked() {
    Stopwatch stopwatch = new Stopwatch("Capture");

    CameraXSelfieFlashHelper flashHelper = new CameraXSelfieFlashHelper(
        requireActivity().getWindow(),
        camera,
        selfieFlash
    );

    camera.takePicture(Executors.mainThreadExecutor(), new ImageCapture.OnImageCapturedCallback() {
        @Override
        public void onCaptureSuccess(@NonNull ImageProxy image) {
            flashHelper.endFlash();

            SimpleTask.run(CameraXFragment.this.getViewLifecycleOwner().getLifecycle(), () -> {
                stopwatch.split("captured");
                try {
                    // Convert off the main thread, mirroring the image for front-facing captures.
                    return CameraXUtil.toJpeg(image, camera.getCameraLensFacing() == CameraSelector.LENS_FACING_FRONT);
                } catch (IOException e) {
                    return null;
                } finally {
                    image.close();
                }
            }, result -> {
                stopwatch.split("transformed");
                stopwatch.stop(TAG);

                if (result != null) {
                    controller.onImageCaptured(result.getData(), result.getWidth(), result.getHeight());
                } else {
                    controller.onCameraError();
                }
            });
        }

        @Override
        public void onError(ImageCaptureException exception) {
            flashHelper.endFlash();
            controller.onCameraError();
        }
    });

    flashHelper.startFlash();
}
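CameraXUtil.toJpeg() is project code whose body is not shown here. As a rough sketch of one common way to pull the encoded bytes out of a JPEG-format ImageProxy (an illustration, not the Molly implementation):

    // Sketch: JPEG captures carry the encoded bytes in a single plane.
    private static byte[] jpegBytesFrom(@NonNull ImageProxy image) {
        if (image.getFormat() != ImageFormat.JPEG) {
            throw new IllegalArgumentException("Expected a JPEG ImageProxy, got format " + image.getFormat());
        }
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        return bytes;
    }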
Example #5
Source File: MainActivity.java, from journaldev (MIT License)
private ImageCapture setImageCapture() {
    ImageCaptureConfig imageCaptureConfig = new ImageCaptureConfig.Builder()
            .setCaptureMode(ImageCapture.CaptureMode.MIN_LATENCY)
            .setTargetRotation(getWindowManager().getDefaultDisplay().getRotation())
            .build();

    final ImageCapture imgCapture = new ImageCapture(imageCaptureConfig);

    btnCapture.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            imgCapture.takePicture(new ImageCapture.OnImageCapturedListener() {
                @Override
                public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {
                    Bitmap bitmap = textureView.getBitmap();
                    showAcceptedRejectedButton(true);
                    ivBitmap.setImageBitmap(bitmap);
                }

                @Override
                public void onError(ImageCapture.UseCaseError useCaseError, String message, @Nullable Throwable cause) {
                    super.onError(useCaseError, message, cause);
                }
            });

            /*File file = new File(
                    Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES),
                    "" + System.currentTimeMillis() + "_JDCameraX.jpg");

            imgCapture.takePicture(file, new ImageCapture.OnImageSavedListener() {
                @Override
                public void onImageSaved(@NonNull File file) {
                    Bitmap bitmap = textureView.getBitmap();
                    showAcceptedRejectedButton(true);
                    ivBitmap.setImageBitmap(bitmap);
                }

                @Override
                public void onError(@NonNull ImageCapture.UseCaseError useCaseError, @NonNull String message, @Nullable Throwable cause) {
                }
            });*/
        }
    });

    return imgCapture;
}
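This example targets the pre-1.0 alpha CameraX API: ImageCaptureConfig, ImageCapture.OnImageCapturedListener and UseCaseError were removed before the stable release. A sketch of roughly the same capture flow against the 1.x API follows (the executor and logging choices are assumptions):

    ImageCapture imageCapture = new ImageCapture.Builder()
            .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
            .setTargetRotation(getWindowManager().getDefaultDisplay().getRotation())
            .build();

    imageCapture.takePicture(ContextCompat.getMainExecutor(this), new ImageCapture.OnImageCapturedCallback() {
        @Override
        public void onCaptureSuccess(@NonNull ImageProxy image) {
            // The rotation now comes from the ImageProxy itself rather than a callback argument.
            int rotationDegrees = image.getImageInfo().getRotationDegrees();
            ivBitmap.setImageBitmap(textureView.getBitmap());
            image.close();
        }

        @Override
        public void onError(@NonNull ImageCaptureException exception) {
            Log.e("MainActivity", "Capture failed", exception);
        }
    });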
Example #6
Source File: MainActivity.java, from journaldev (MIT License)
private ImageAnalysis setImageAnalysis() {
    // Set up an image analysis pipeline that runs on its own handler thread.
    HandlerThread analyzerThread = new HandlerThread("OpenCVAnalysis");
    analyzerThread.start();

    ImageAnalysisConfig imageAnalysisConfig = new ImageAnalysisConfig.Builder()
            .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
            .setCallbackHandler(new Handler(analyzerThread.getLooper()))
            .setImageQueueDepth(1)
            .build();

    ImageAnalysis imageAnalysis = new ImageAnalysis(imageAnalysisConfig);

    imageAnalysis.setAnalyzer(new ImageAnalysis.Analyzer() {
        @Override
        public void analyze(ImageProxy image, int rotationDegrees) {
            // Analyzing the live camera feed begins here.
            final Bitmap bitmap = textureView.getBitmap();
            if (bitmap == null) {
                return;
            }

            Mat mat = new Mat();
            Utils.bitmapToMat(bitmap, mat);
            Imgproc.cvtColor(mat, mat, currentImageType);
            Utils.matToBitmap(mat, bitmap);

            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    ivBitmap.setImageBitmap(bitmap);
                }
            });
        }
    });

    return imageAnalysis;
}
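The original source describes this pipeline as one that computes average pixel luminance, but the analyzer above only converts the preview bitmap's colour space. As a rough sketch (an assumption, not part of the journaldev project), mean luma could be computed directly from the ImageProxy's Y plane, since ImageAnalysis delivers YUV_420_888 frames by default:

    // Sketch: average luminance from the Y plane of a YUV_420_888 frame.
    private static double averageLuma(@NonNull ImageProxy image) {
        ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
        int count = yBuffer.remaining();
        long sum = 0;
        while (yBuffer.hasRemaining()) {
            sum += yBuffer.get() & 0xFF; // bytes are signed; mask to 0..255
        }
        return count == 0 ? 0 : (double) sum / count;
    }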