Java Code Examples for android.media.ImageReader#acquireNextImage()
The following examples show how to use android.media.ImageReader#acquireNextImage().
Each example is taken from an open-source project; the source file and license are noted above the code.
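Before the project examples, here is a minimal sketch of the usual pattern: set up an ImageReader, acquire the next Image inside onImageAvailable(), read what you need, and close the Image promptly so the reader does not run out of buffers (it can only hold maxImages frames at once). The size, format, maxImages value, and backgroundHandler below are illustrative assumptions, not values taken from any of the projects.

// A minimal sketch (not from any of the projects below): the 640x480 size, YUV_420_888
// format, maxImages count, and backgroundHandler are assumptions for illustration.
ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 2);
reader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // acquireNextImage() returns the next queued frame, or null if none is pending.
        try (Image image = reader.acquireNextImage()) {
            if (image == null) {
                return;
            }
            Image.Plane[] planes = image.getPlanes();
            // ... copy or process the plane buffers here, before the image is closed ...
        } // try-with-resources closes the Image, returning its buffer to the reader
    }
}, backgroundHandler);

Most of the examples below rely on this same try-with-resources idiom to make sure the Image is released.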
Example 1
Source File: Camera2.java From VIA-AI with MIT License
@Override
public void onImageAvailable(ImageReader reader) {
    synchronized (mLock) {
        if (borrowedImage == null) {
            Image i = reader.acquireNextImage();
            borrowedImage = i;
            if (bDebug) {
                Log.d(TAG, "==== OnImageReady ====");
                Log.d(TAG, "Size:" + i.getWidth() + "x" + i.getHeight());
                Log.d(TAG, "Format:" + i.getFormat());
                Log.d(TAG, "#Planes:" + i.getPlanes().length);
                Log.d(TAG, "Y-Plane Pixel Stride:" + i.getPlanes()[0].getPixelStride());
                Log.d(TAG, "Y-Planes Row Stride" + i.getPlanes()[0].getRowStride());
                Log.d(TAG, "Y(i[0]) Start Address:" + NativeRender.getPointerFromByteBuffer(i.getPlanes()[0].getBuffer(), 0));
                Log.d(TAG, "U(i[0]) Start Address:" + NativeRender.getPointerFromByteBuffer(i.getPlanes()[1].getBuffer(), 0));
                Log.d(TAG, "V(i[0]) Start Address:" + NativeRender.getPointerFromByteBuffer(i.getPlanes()[2].getBuffer(), 0));
                Log.d(TAG, "======================");
            }
            if (mCallback != null) mCallback.onFrameReady();
        }
    }
}
Example 2
Source File: Camera2.java From TikTok with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    try (Image image = reader.acquireNextImage()) {
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
Example 3
Source File: Camera2Source.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License
@Override
public void onImageAvailable(ImageReader reader) {
    Image mImage = reader.acquireNextImage();
    if (mImage == null) {
        return;
    }
    mFrameProcessor.setNextFrame(convertYUV420888ToNV21(mImage));
    mImage.close();
}
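Examples 3 and 5 hand the acquired Image to a convertYUV420888ToNV21() helper that neither snippet includes. The sketch below is an assumption about what such a helper might do, not code from either project: it walks the three YUV_420_888 planes, honoring their row and pixel strides, and packs them into the NV21 layout (a full-resolution Y plane followed by interleaved V/U samples).

// Hypothetical helper, sketched here for illustration only.
private static byte[] convertYUV420888ToNV21(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] nv21 = new byte[width * height * 3 / 2];

    // Copy the Y plane row by row. For YUV_420_888 the Y plane's pixel stride is always 1,
    // but its row stride may be larger than the width.
    Image.Plane yPlane = image.getPlanes()[0];
    ByteBuffer yBuffer = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    for (int row = 0; row < height; row++) {
        yBuffer.position(row * yRowStride);
        yBuffer.get(nv21, pos, width);
        pos += width;
    }

    // Interleave V and U. The U and V planes share the same row and pixel strides.
    Image.Plane uPlane = image.getPlanes()[1];
    Image.Plane vPlane = image.getPlanes()[2];
    ByteBuffer uBuffer = uPlane.getBuffer();
    ByteBuffer vBuffer = vPlane.getBuffer();
    int chromaRowStride = uPlane.getRowStride();
    int chromaPixelStride = uPlane.getPixelStride();
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            int offset = row * chromaRowStride + col * chromaPixelStride;
            nv21[pos++] = vBuffer.get(offset);  // V first in NV21
            nv21[pos++] = uBuffer.get(offset);
        }
    }
    return nv21;
}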
Example 4
Source File: Camera2.java From LockDemo with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    try (Image image = reader.acquireNextImage()) {
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
Example 5
Source File: Camera2Source.java From Camera2Vision with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    Image mImage = reader.acquireNextImage();
    if (mImage == null) {
        return;
    }
    mFrameProcessor.setNextFrame(convertYUV420888ToNV21(mImage));
    mImage.close();
}
Example 6
Source File: Camera2.java From MediaPickerInstagram with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    try (Image image = reader.acquireNextImage()) {
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
Example 7
Source File: Camera2.java From cameraview with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    try (Image image = reader.acquireNextImage()) {
        Image.Plane[] planes = image.getPlanes();
        if (planes.length > 0) {
            ByteBuffer buffer = planes[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            mCallback.onPictureTaken(data);
        }
    }
}
Example 8
Source File: AndroidCamera2AgentImpl.java From Camera2 with Apache License 2.0
@Override
public void takePicture(final Handler handler, final CameraShutterCallback shutter,
                        CameraPictureCallback raw, CameraPictureCallback postview,
                        final CameraPictureCallback jpeg) {
    // TODO: We never call raw or postview
    final CaptureAvailableListener picListener = new CaptureAvailableListener() {
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                                     long timestamp, long frameNumber) {
            if (shutter != null) {
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        if (mShutterSoundEnabled) {
                            mNoisemaker.play(MediaActionSound.SHUTTER_CLICK);
                        }
                        shutter.onShutter(AndroidCamera2ProxyImpl.this);
                    }
                });
            }
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            try (Image image = reader.acquireNextImage()) {
                if (jpeg != null) {
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    final byte[] pixels = new byte[buffer.remaining()];
                    buffer.get(pixels);
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            jpeg.onPictureTaken(pixels, AndroidCamera2ProxyImpl.this);
                        }
                    });
                }
            }
        }
    };
    try {
        mDispatchThread.runJob(new Runnable() {
            @Override
            public void run() {
                // Wait until PREVIEW_ACTIVE or better
                mCameraState.waitForStates(
                        ~(AndroidCamera2StateHolder.CAMERA_PREVIEW_ACTIVE - 1));
                mCameraHandler.obtainMessage(CameraActions.CAPTURE_PHOTO, picListener)
                        .sendToTarget();
            }
        });
    } catch (RuntimeException ex) {
        mCameraAgent.getCameraExceptionHandler().onDispatchThreadException(ex);
    }
}
Example 9
Source File: ImageClassifierActivity.java From sample-tensorflow-imageclassifier with Apache License 2.0
@Override
public void onImageAvailable(ImageReader reader) {
    final Bitmap bitmap;
    try (Image image = reader.acquireNextImage()) {
        bitmap = mImagePreprocessor.preprocessImage(image);
    }

    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mImage.setImageBitmap(bitmap);
        }
    });

    final Collection<Recognition> results = mTensorFlowClassifier.doRecognize(bitmap);
    Log.d(TAG, "Got the following results from Tensorflow: " + results);

    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            if (results == null || results.isEmpty()) {
                mResultText.setText("I don't understand what I see");
            } else {
                StringBuilder sb = new StringBuilder();
                Iterator<Recognition> it = results.iterator();
                int counter = 0;
                while (it.hasNext()) {
                    Recognition r = it.next();
                    sb.append(r.getTitle());
                    counter++;
                    if (counter < results.size() - 1) {
                        sb.append(", ");
                    } else if (counter == results.size() - 1) {
                        sb.append(" or ");
                    }
                }
                mResultText.setText(sb.toString());
            }
        }
    });

    if (mTtsEngine != null) {
        // speak out loud the result of the image recognition
        mTtsSpeaker.speakResults(mTtsEngine, results);
    } else {
        // if there's no TTS, we don't need to wait until the utterance is spoken, so we set
        // to ready right away.
        setReady(true);
    }
}
Example 10
Source File: CameraH264Activity.java From AndroidDemo with MIT License
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireNextImage();
    Log.d("image", "size [ w: " + image.getWidth() + " h: " + image.getHeight() + " ]");
    // Close the Image once it is no longer needed; otherwise the ImageReader runs out of
    // buffers after maxImages acquisitions and stops delivering frames.
    image.close();
}