Java Code Examples for android.media.Image#getWidth()
The following examples show how to use android.media.Image#getWidth().
Each example notes its original project, source file, and license.
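Before the project examples, here is a minimal sketch of the pattern most of them share: acquire the latest Image from an ImageReader, use getWidth()/getHeight() together with the first plane's pixel and row strides to account for row padding, copy the pixels into a Bitmap, and close the Image. This is an illustrative sketch only; the class name, the RGBA_8888/ARGB_8888 assumption, and the crop step are assumptions for illustration, not code from any of the projects below.

import android.graphics.Bitmap;
import android.media.Image;
import android.media.ImageReader;
import java.nio.ByteBuffer;

// Hypothetical helper (not from any project below): converts one RGBA_8888 frame
// from an ImageReader into a Bitmap, using getWidth()/getHeight() plus the plane's
// row stride to account for per-row padding.
final class LatestFrameToBitmap {
    static Bitmap convert(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image == null) {
            return null;                  // no frame available yet
        }
        try {
            int width = image.getWidth();  // frame width in pixels
            int height = image.getHeight();
            Image.Plane plane = image.getPlanes()[0];
            ByteBuffer buffer = plane.getBuffer();
            int pixelStride = plane.getPixelStride();
            int rowStride = plane.getRowStride();
            int rowPadding = rowStride - pixelStride * width;

            // Each row may be padded, so allocate the padded width and crop afterwards.
            Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride,
                    height, Bitmap.Config.ARGB_8888);
            padded.copyPixelsFromBuffer(buffer);
            return Bitmap.createBitmap(padded, 0, 0, width, height);
        } finally {
            image.close();                 // always release the frame back to the reader
        }
    }
}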
Example 1
Source File: ImageUtils.java From SimpleSmsRemote with MIT License | 8 votes |
/**
 * Retrieve Bitmap with specific format from ImageReader.
 *
 * @param imageReader the image reader
 * @return bitmap
 */
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
public static Bitmap GetBitmapFromImageReader(ImageReader imageReader) {
    Bitmap bitmap;

    // get image buffer
    Image image = imageReader.acquireLatestImage();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();

    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * image.getWidth();

    // create bitmap
    bitmap = Bitmap.createBitmap(image.getWidth() + rowPadding / pixelStride,
            image.getHeight(), Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    image.close();

    return bitmap;
}
Example 2
Source File: DngCreator.java From android_9.0.0_r45 with Apache License 2.0 | 6 votes |
/**
 * Set the thumbnail image.
 *
 * <p>
 * Pixel data is interpreted as a {@link android.graphics.ImageFormat#YUV_420_888} image.
 * Thumbnail images with a dimension larger than {@link #MAX_THUMBNAIL_DIMENSION} will be
 * rejected.
 * </p>
 *
 * @param pixels an {@link android.media.Image} object with the format
 *               {@link android.graphics.ImageFormat#YUV_420_888}.
 * @return this {@link #DngCreator} object.
 * @throws java.lang.IllegalArgumentException if the given thumbnail image has a dimension
 *                                            larger than {@link #MAX_THUMBNAIL_DIMENSION}.
 */
@NonNull
public DngCreator setThumbnail(@NonNull Image pixels) {
    if (pixels == null) {
        throw new IllegalArgumentException("Null argument to setThumbnail");
    }

    int format = pixels.getFormat();
    if (format != ImageFormat.YUV_420_888) {
        throw new IllegalArgumentException("Unsupported Image format " + format);
    }

    int width = pixels.getWidth();
    int height = pixels.getHeight();

    if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
        throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
                "," + height + ") too large, dimensions must be smaller than " +
                MAX_THUMBNAIL_DIMENSION);
    }

    ByteBuffer rgbBuffer = convertToRGB(pixels);
    nativeSetThumbnail(rgbBuffer, width, height);

    return this;
}
Example 3
Source File: ImageCapture.java From DoraemonKit with Apache License 2.0 | 6 votes |
void capture() {
    if (isCapturing) {
        return;
    }
    if (mImageReader == null) {
        return;
    }
    isCapturing = true;
    Image image = mImageReader.acquireLatestImage();
    if (image == null) {
        return;
    }
    int width = image.getWidth();
    int height = image.getHeight();
    Image.Plane[] planes = image.getPlanes();
    ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPaddingStride = rowStride - pixelStride * width;
    int rowPadding = rowPaddingStride / pixelStride;
    Bitmap recordBitmap = Bitmap.createBitmap(width + rowPadding, height, Bitmap.Config.ARGB_8888);
    recordBitmap.copyPixelsFromBuffer(buffer);
    mBitmap = Bitmap.createBitmap(recordBitmap, 0, 0, width, height);
    image.close();
    isCapturing = false;
}
Example 4
Source File: ImageUtils.java From ScreenCapture with MIT License | 6 votes |
/**
 * This method performs the conversion, but the resulting image gains extra columns on the
 * right: where the method above yields 1080x2160, this one yields 1088x2160, so the
 * resulting Bitmap has to be cropped.
 *
 * @param image
 * @param config
 * @return
 */
public static Bitmap image_2_bitmap(Image image, Bitmap.Config config) {
    int width = image.getWidth();
    int height = image.getHeight();
    Bitmap bitmap;
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Log.d("WOW", "pixelStride:" + pixelStride + ". rowStride:" + rowStride + ". rowPadding" + rowPadding);
    bitmap = Bitmap.createBitmap(
            width + rowPadding / pixelStride /* equals: rowStride / pixelStride */,
            height, config);
    bitmap.copyPixelsFromBuffer(buffer);
    return Bitmap.createBitmap(bitmap, 0, 0, width, height);
    // return bitmap;
}
Example 5
Source File: SRManager.java From VMLibrary with Apache License 2.0 | 6 votes |
/**
 * Grab the next frame as an image from the underlying ImageReader.
 */
public Bitmap cutoutFrame() {
    Image image = imageReader.acquireLatestImage();
    if (image == null) {
        return null;
    }
    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    return Bitmap.createBitmap(bitmap, 0, 0, width, height);
}
Example 6
Source File: ImageScreenCast.java From DeviceConnect-Android with MIT License | 6 votes |
private Bitmap decodeToBitmap(final Image img) {
    Image.Plane[] planes = img.getPlanes();
    if (planes[0].getBuffer() == null) {
        return null;
    }

    int width = img.getWidth();
    int height = img.getHeight();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;

    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride,
            height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(planes[0].getBuffer());
    img.close();

    return Bitmap.createBitmap(bitmap, 0, 0, width, height, null, true);
}
Example 7
Source File: MainActivity.java From AndroidPlayground with MIT License | 5 votes |
@Override
public void onImageAvailable(ImageReader imageReader) {
    Log.d("MainActivity", "onImageAvailable");
    final Image image = imageReader.acquireLatestImage();
    count++;
    if (count == 100) {
        byte[] yuv = new byte[image.getWidth() * image.getHeight() * 3 / 2];
        image2yuv(image, yuv);
        saveRawYuvData(yuv, image.getWidth(), image.getHeight(), "org");
    }
    image.close();
}
Example 8
Source File: WindowCaptureFragment.java From ViewCapture with Apache License 2.0 | 5 votes |
private Bitmap createBitmap() {
    Image image = mImageReader.acquireLatestImage();
    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;
    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
    image.close();
    return bitmap;
}
Example 9
Source File: ImageUtils.java From ScreenCapture with MIT License | 5 votes |
public static Bitmap image_ARGB8888_2_bitmap(DisplayMetrics metrics, Image image) {
    Image.Plane[] planes = image.getPlanes();
    ByteBuffer buffer = planes[0].getBuffer();

    int width = image.getWidth();
    // Log.d("WOW", "image w = " + width);
    int height = image.getHeight();
    // Log.d("WOW", "image h = " + height);

    int pixelStride = planes[0].getPixelStride();
    // Log.d("WOW", "pixelStride is " + pixelStride);
    int rowStride = planes[0].getRowStride();
    // Log.d("WOW", "row Stride is " + rowStride);
    int rowPadding = rowStride - pixelStride * width;
    // Log.d("WOW", "rowPadding is " + rowPadding);

    int offset = 0;
    Bitmap bitmap;
    bitmap = Bitmap.createBitmap(metrics, width, height, Bitmap.Config.ARGB_8888);
    for (int i = 0; i < height; ++i) {
        for (int j = 0; j < width; ++j) {
            int pixel = 0;
            pixel |= (buffer.get(offset) & 0xff) << 16;     // R
            pixel |= (buffer.get(offset + 1) & 0xff) << 8;  // G
            pixel |= (buffer.get(offset + 2) & 0xff);       // B
            pixel |= (buffer.get(offset + 3) & 0xff) << 24; // A
            bitmap.setPixel(j, i, pixel);
            offset += pixelStride;
        }
        offset += rowPadding;
    }
    return bitmap;
}
Example 10
Source File: HyperionScreenEncoder.java From hyperion-android-grabber with MIT License | 5 votes |
private void sendImage(Image img) {
    Image.Plane plane = img.getPlanes()[0];
    ByteBuffer buffer = plane.getBuffer();

    int width = img.getWidth();
    int height = img.getHeight();
    int pixelStride = plane.getPixelStride();
    int rowStride = plane.getRowStride();
    int firstX = 0;
    int firstY = 0;

    if (mRemoveBorders || mAvgColor) {
        mBorderProcessor.parseBorder(buffer, width, height, rowStride, pixelStride);
        BorderProcessor.BorderObject border = mBorderProcessor.getCurrentBorder();
        if (border != null && border.isKnown()) {
            firstX = border.getHorizontalBorderIndex();
            firstY = border.getVerticalBorderIndex();
        }
    }

    if (mAvgColor) {
        mListener.sendFrame(
                getAverageColor(buffer, width, height, rowStride, pixelStride, firstX, firstY),
                1, 1
        );
    } else {
        mListener.sendFrame(
                getPixels(buffer, width, height, rowStride, pixelStride, firstX, firstY),
                width - firstX * 2,
                height - firstY * 2
        );
    }
}
Example 11
Source File: CameraHelp2.java From WeiXinRecordedDemo with MIT License | 5 votes |
private byte[] getYUVI420(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();

    byte[] yuvI420 = new byte[image.getWidth() * image.getHeight() * 3 / 2];
    byte[] yData = new byte[image.getPlanes()[0].getBuffer().remaining()];
    byte[] uData = new byte[image.getPlanes()[1].getBuffer().remaining()];
    byte[] vData = new byte[image.getPlanes()[2].getBuffer().remaining()];
    image.getPlanes()[0].getBuffer().get(yData);
    image.getPlanes()[1].getBuffer().get(uData);
    image.getPlanes()[2].getBuffer().get(vData);

    System.arraycopy(yData, 0, yuvI420, 0, yData.length);

    int index = yData.length;
    for (int r = 0; r < height / 2; ++r) {
        for (int c = 0; c < width; c += 2) {
            // the U/V planes are interleaved, one byte per U or V value, so read every other byte
            yuvI420[index++] = uData[r * width + c];
        }
    }
    for (int r = 0; r < height / 2; ++r) {
        for (int c = 0; c < width; c += 2) {
            // the U/V planes are interleaved, one byte per U or V value, so read every other byte
            yuvI420[index++] = vData[r * width + c];
        }
    }
    return yuvI420;
}
Example 12
Source File: ImageDecoder.java From FastBarcodeScanner with Apache License 2.0 | 5 votes |
private static void getNV21(Image src, byte[] dest) {
    // Check nPlanes etc.
    Image.Plane yPlane = src.getPlanes()[0];
    Image.Plane uPlane = src.getPlanes()[1];
    Image.Plane vPlane = src.getPlanes()[2];

    int ySize = yPlane.getBuffer().capacity();
    int uSize = uPlane.getBuffer().capacity();
    int vSize = vPlane.getBuffer().capacity();

    if (ySize != src.getWidth() * src.getHeight())
        throw new RuntimeException("Y-plane in planar YUV_420_888 is expected to be width*height bytes");
    if (ySize != 2 * (uSize + 1))
        throw new RuntimeException("U-plane in planar YUV_420_888 is expected to be (width*height/2 - 1) bytes");
    if (ySize != 2 * (vSize + 1))
        throw new RuntimeException("V-plane in planar YUV_420_888 is expected to be (width*height/2 - 1) bytes");

    //int nextFree = getNonInterleaved(yPlane.getBuffer(), dest, 0);
    //getInterleaved(vPlane.getBuffer(), 2, dest, nextFree, 2);
    //getInterleaved(uPlane.getBuffer(), 2, dest, nextFree + 1, 2);

    int nextFree = 0;
    nextFree += getNonInterleaved(yPlane.getBuffer(), dest, nextFree);
    nextFree += getNonInterleaved(vPlane.getBuffer(), dest, nextFree);
    nextFree += getNonInterleaved(uPlane.getBuffer(), dest, nextFree);
}
Example 13
Source File: SRManager.java From VMLibrary with Apache License 2.0 | 5 votes |
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    long currTime = System.currentTimeMillis();
    VMLog.d("Image available callback, interval %d ms", currTime - oldTime);
    if (currTime - oldTime > 100) {
        oldTime = currTime;
        Bitmap bitmap = null;
        if (image != null) {
            Image.Plane[] planes = image.getPlanes();
            ByteBuffer buffer = planes[0].getBuffer();
            int width = image.getWidth();
            int height = image.getHeight();
            int pixelStride = planes[0].getPixelStride();
            int rowStride = planes[0].getRowStride();
            int rowPadding = rowStride - pixelStride * width;
            // create bitmap
            bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
            bitmap.copyPixelsFromBuffer(buffer);
            bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
            if (screenShortCallback != null) {
                screenShortCallback.onBitmap(bitmap);
            }
        }
    }
    if (image != null) {
        image.close();
    }
}
Example 14
Source File: FragmentDecoder.java From camera2QRcodeReader with MIT License | 5 votes |
@Override
public void onImageAvailable(ImageReader reader) {
    Log.e(TAG, "onImageAvailable: " + count++);
    Image img = null;
    img = reader.acquireLatestImage();
    Result rawResult = null;
    try {
        if (img == null) throw new NullPointerException("cannot be null");
        ByteBuffer buffer = img.getPlanes()[0].getBuffer();
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data);
        int width = img.getWidth();
        int height = img.getHeight();
        PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(data, width, height);
        BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
        rawResult = mQrReader.decode(bitmap);
        onQRCodeRead(rawResult.getText());
    } catch (ReaderException ignored) {
        Log.e(TAG, "Reader shows an exception! ", ignored);
        /* Ignored */
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    } finally {
        mQrReader.reset();
        Log.e(TAG, "in the finally! ------------");
        if (img != null)
            img.close();
    }
    if (rawResult != null) {
        Log.e(TAG, "Decoding successful!");
    } else {
        Log.d(TAG, "No QR code found…");
    }
}
Example 15
Source File: ImageDecoder.java From FastBarcodeScanner with Apache License 2.0 | 4 votes |
private static int getNV21Size(Image src) {
    //return (int)(src.getHeight() * src.getWidth() * 1.5);
    return (int) (src.getHeight() * src.getWidth() * 2);
}
Example 16
Source File: ImageUtils.java From FastBarcodeScanner with Apache License 2.0 | 4 votes |
public static byte[] getPlane(Image image, int planeNo) {
    ByteBuffer buffer;
    int rowStride;
    int pixelStride;
    int pixelWidth = image.getWidth();
    int pixelHeight = image.getHeight();

    int encodedRowStart = 0;

    Image.Plane[] planes = image.getPlanes();
    int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
    byte[] pixels = new byte[image.getWidth() * image.getHeight() * bytesPerPixel];
    byte[] rowData = new byte[planes[0].getRowStride()];

    for (int i = 0; i < planes.length; i++) {
        buffer = planes[i].getBuffer();
        rowStride = planes[i].getRowStride();
        pixelStride = planes[i].getPixelStride();

        int encodedWidthInPixels = (i == 0) ? pixelWidth : pixelWidth / 2;
        int encodedHeightInPixels = (i == 0) ? pixelHeight : pixelHeight / 2;

        for (int row = 0; row < encodedHeightInPixels; row++) {
            if (pixelStride == bytesPerPixel) {
                int encodedWidthInBytes = encodedWidthInPixels * bytesPerPixel;
                buffer.get(pixels, encodedRowStart, encodedWidthInBytes);

                // Advance buffer the remainder of the row stride, unless on the last row.
                // Otherwise, this will throw an IllegalArgumentException because the buffer
                // doesn't include the last padding.
                if (encodedHeightInPixels - row != 1) {
                    int padding = rowStride - encodedWidthInBytes;
                    buffer.position(buffer.position() + padding);
                }
                encodedRowStart += encodedWidthInBytes;
            } else {
                // On the last row only read the width of the image minus the pixel stride
                // plus one. Otherwise, this will throw a BufferUnderflowException because the
                // buffer doesn't include the last padding.
                if (encodedHeightInPixels - row == 1) {
                    buffer.get(rowData, 0, pixelWidth - pixelStride + 1);
                } else {
                    buffer.get(rowData, 0, rowStride);
                }

                for (int col = 0; col < encodedWidthInPixels; col++) {
                    pixels[encodedRowStart + col] = rowData[col * pixelStride];
                }
                //encodedRowStart += encodedWidthInBytes;
            }
        }
    }

    // Finally, create the Mat.
    //Mat mat = new Mat(pixelHeight + pixelHeight / 2, pixelWidth, CvType.CV_8UC1);
    //mat.put(0, 0, pixels);

    return pixels;
}
Example 17
Source File: TensorflowImageListener.java From AndroidDemoProjects with Apache License 2.0 | 4 votes |
@Override
public void onImageAvailable(final ImageReader reader) {
    Image image = null;

    try {
        image = reader.acquireLatestImage();

        if (image == null) {
            return;
        }

        // No mutex needed as this method is not reentrant.
        if (computing) {
            image.close();
            return;
        }
        computing = true;

        Trace.beginSection("imageAvailable");

        final Plane[] planes = image.getPlanes();

        // Initialize the storage bitmaps once when the resolution is known.
        if (previewWidth != image.getWidth() || previewHeight != image.getHeight()) {
            previewWidth = image.getWidth();
            previewHeight = image.getHeight();

            LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
            rgbBytes = new int[previewWidth * previewHeight];
            rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
            croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

            yuvBytes = new byte[planes.length][];
            for (int i = 0; i < planes.length; ++i) {
                yuvBytes[i] = new byte[planes[i].getBuffer().capacity()];
            }
        }

        for (int i = 0; i < planes.length; ++i) {
            planes[i].getBuffer().get(yuvBytes[i]);
        }

        final int yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();
        ImageUtils.convertYUV420ToARGB8888(
                yuvBytes[0],
                yuvBytes[1],
                yuvBytes[2],
                rgbBytes,
                previewWidth,
                previewHeight,
                yRowStride,
                uvRowStride,
                uvPixelStride,
                false);

        image.close();
    } catch (final Exception e) {
        if (image != null) {
            image.close();
        }
        LOGGER.e(e, "Exception!");
        Trace.endSection();
        return;
    }

    rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
    drawResizedBitmap(rgbFrameBitmap, croppedBitmap);

    // For examining the actual TF input.
    if (SAVE_PREVIEW_BITMAP) {
        ImageUtils.saveBitmap(croppedBitmap);
    }

    handler.post(
            new Runnable() {
                @Override
                public void run() {
                    final List<Classifier.Recognition> results = tensorflow.recognizeImage(croppedBitmap);

                    LOGGER.v("%d results", results.size());
                    for (final Classifier.Recognition result : results) {
                        LOGGER.v("Result: " + result.getTitle());
                    }
                    scoreView.setResults(results);
                    computing = false;
                }
            });

    Trace.endSection();
}
Example 18
Source File: Screenshotter.java From RelaxFinger with GNU General Public License v2.0 | 4 votes |
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void onImageAvailable(ImageReader reader) {
    Image image = null;
    try {
        image = reader.acquireLatestImage();
    } catch (UnsupportedOperationException e) {
        e.printStackTrace();
        return;
    }

    if (image == null) {
        return;
    }

    int width = image.getWidth();
    int height = image.getHeight();

    final Image.Plane[] planes = image.getPlanes();
    final ByteBuffer buffer = planes[0].getBuffer();
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * width;

    Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);

    cb.onScreenshot(bitmap);

    if (virtualDisplay != null) {
        virtualDisplay.release();
        virtualDisplay = null;
    }
    if (mMediaProjection != null) {
        mMediaProjection.stop();
        mMediaProjection = null;
    }

    image.close();
    mImageReader = null;
}
Example 19
Source File: ResultProcessor.java From libsoftwaresync with Apache License 2.0 | 4 votes |
private static YuvImage yuvImageFromNv21Image(Image src) {
    long t0 = System.nanoTime();

    Image.Plane[] planes = src.getPlanes();
    Image.Plane luma = planes[0];
    Image.Plane chromaU = planes[1];
    Image.Plane chromaV = planes[2];

    int width = src.getWidth();
    int height = src.getHeight();

    // Luma should be tightly packed and chroma should be tightly interleaved.
    assert (luma.getPixelStride() == 1);
    assert (chromaU.getPixelStride() == 2);
    assert (chromaV.getPixelStride() == 2);

    // Duplicate (shallow copy) each buffer so as to not disturb the underlying position/limit/etc.
    ByteBuffer lumaBuffer = luma.getBuffer().duplicate();
    ByteBuffer chromaUBuffer = chromaU.getBuffer().duplicate();
    ByteBuffer chromaVBuffer = chromaV.getBuffer().duplicate();

    // Yes, y, v, then u since it's NV21.
    int[] yvuRowStrides =
            new int[] {luma.getRowStride(), chromaV.getRowStride(), chromaU.getRowStride()};

    // Compute bytes needed to concatenate all the (potentially padded) YUV data in one buffer.
    int lumaBytes = height * luma.getRowStride();
    int interleavedChromaBytes = (height / 2) * chromaV.getRowStride();
    assert (lumaBuffer.capacity() == lumaBytes);
    int packedYVUBytes = lumaBytes + interleavedChromaBytes;
    byte[] packedYVU = new byte[packedYVUBytes];

    int packedYVUOffset = 0;
    lumaBuffer.get(
            packedYVU, packedYVUOffset, lumaBuffer.capacity()); // packedYVU[0..lumaBytes) <-- lumaBuffer.
    packedYVUOffset += lumaBuffer.capacity();

    // Write the V buffer. Since the V buffer contains U data, write all of V and then check how
    // much U data is left over. There will be at most 1 byte plus padding.
    chromaVBuffer.get(packedYVU, packedYVUOffset, /*length=*/ chromaVBuffer.capacity());
    packedYVUOffset += chromaVBuffer.capacity();

    // Write the remaining portion of the U buffer (if any).
    int chromaUPosition = chromaVBuffer.capacity() - 1;
    if (chromaUPosition < chromaUBuffer.capacity()) {
        chromaUBuffer.position(chromaUPosition);
        int remainingBytes = Math.min(chromaUBuffer.remaining(), lumaBytes - packedYVUOffset);
        if (remainingBytes > 0) {
            chromaUBuffer.get(packedYVU, packedYVUOffset, remainingBytes);
        }
    }

    YuvImage yuvImage = new YuvImage(packedYVU, ImageFormat.NV21, width, height, yvuRowStrides);

    long t1 = System.nanoTime();
    Log.i(TAG, String.format("yuvImageFromNv21Image took %f ms.", (t1 - t0) * 1e-6f));
    return yuvImage;
}
Example 20
Source File: DngCreator.java From android_9.0.0_r45 with Apache License 2.0 | 4 votes |
/**
 * Generate a direct RGB {@link ByteBuffer} from a YUV420_888 {@link Image}.
 */
private static ByteBuffer convertToRGB(Image yuvImage) {
    // TODO: Optimize this with renderscript intrinsic.
    int width = yuvImage.getWidth();
    int height = yuvImage.getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);

    Image.Plane yPlane = yuvImage.getPlanes()[0];
    Image.Plane uPlane = yuvImage.getPlanes()[1];
    Image.Plane vPlane = yuvImage.getPlanes()[2];

    ByteBuffer yBuf = yPlane.getBuffer();
    ByteBuffer uBuf = uPlane.getBuffer();
    ByteBuffer vBuf = vPlane.getBuffer();

    yBuf.rewind();
    uBuf.rewind();
    vBuf.rewind();

    int yRowStride = yPlane.getRowStride();
    int vRowStride = vPlane.getRowStride();
    int uRowStride = uPlane.getRowStride();

    int yPixStride = yPlane.getPixelStride();
    int vPixStride = vPlane.getPixelStride();
    int uPixStride = uPlane.getPixelStride();

    byte[] yuvPixel = { 0, 0, 0 };

    byte[] yFullRow = new byte[yPixStride * (width - 1) + 1];
    byte[] uFullRow = new byte[uPixStride * (width / 2 - 1) + 1];
    byte[] vFullRow = new byte[vPixStride * (width / 2 - 1) + 1];
    byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];

    for (int i = 0; i < height; i++) {
        int halfH = i / 2;
        yBuf.position(yRowStride * i);
        yBuf.get(yFullRow);
        uBuf.position(uRowStride * halfH);
        uBuf.get(uFullRow);
        vBuf.position(vRowStride * halfH);
        vBuf.get(vFullRow);
        for (int j = 0; j < width; j++) {
            int halfW = j / 2;
            yuvPixel[0] = yFullRow[yPixStride * j];
            yuvPixel[1] = uFullRow[uPixStride * halfW];
            yuvPixel[2] = vFullRow[vPixStride * halfW];
            yuvToRgb(yuvPixel, j * BYTES_PER_RGB_PIX, /*out*/finalRow);
        }
        buf.put(finalRow);
    }

    yBuf.rewind();
    uBuf.rewind();
    vBuf.rewind();
    buf.rewind();
    return buf;
}