Java Code Examples for android.graphics.ImageFormat#getBitsPerPixel()
The following examples show how to use android.graphics.ImageFormat#getBitsPerPixel().
Each example notes the original project, source file, and license it was taken from.
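As a quick orientation before the examples: getBitsPerPixel() takes one of the ImageFormat constants and returns the average number of bits each pixel occupies (12 for the YUV 4:2:0 preview formats such as NV21 and YV12), or -1 for formats like JPEG whose per-pixel size is not fixed. A minimal sketch of the typical preview-buffer sizing, using a hypothetical camera object as a placeholder, might look like this:

// Hypothetical sketch: size a Camera preview callback buffer from the current preview format.
// "camera" is a placeholder for an opened android.hardware.Camera instance.
Camera.Parameters params = camera.getParameters();
int format = params.getPreviewFormat();                  // e.g. ImageFormat.NV21
Camera.Size preview = params.getPreviewSize();
int bitsPerPixel = ImageFormat.getBitsPerPixel(format);  // 12 for NV21, -1 if unknown
if (bitsPerPixel <= 0) {
    bitsPerPixel = 12;                                    // conservative fallback for YUV 4:2:0
}
int bufferSize = preview.width * preview.height * bitsPerPixel / 8;
camera.addCallbackBuffer(new byte[bufferSize]);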
Example 1
Source File: CameraSource.java From prebid-mobile-android with Apache License 2.0
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
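For a sense of the numbers: ImageFormat.getBitsPerPixel(ImageFormat.NV21) returns 12, so for a hypothetical 1280x720 preview the calculation above gives ceil(1280 * 720 * 12 / 8) + 1 = 1,382,401 bytes, the familiar width * height * 3/2 of a YUV 4:2:0 frame plus one spare byte.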
Example 2
Source File: VideoStream.java From VideoMeeting with Apache License 2.0
public synchronized void startPreview() {
    if (mCamera != null) {
        mCamera.startPreview();
        try {
            mCamera.autoFocus(null);
        } catch (Exception e) {
            // Ignore the exception.
            L.i("auto focus fail");
        }
        int previewFormat = mCamera.getParameters().getPreviewFormat();
        Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
        int size = previewSize.width * previewSize.height
                * ImageFormat.getBitsPerPixel(previewFormat) / 8;
        mCamera.addCallbackBuffer(new byte[size]);
    }
}
Example 3
Source File: CameraSource.java From android-vision with Apache License 2.0
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 4
Source File: CameraSource.java From Camera2Vision with Apache License 2.0
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 5
Source File: CameraSource.java From particle-android with Apache License 2.0
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of the
 * camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
@SuppressLint("InlinedApi")
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    bytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 6
Source File: CameraSource.java From Bluefruit_LE_Connect_Android with MIT License
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 7
Source File: CameraSource.java From Document-Scanner with GNU General Public License v3.0
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 8
Source File: JellyBeanCamera.java From LiveMultimedia with Apache License 2.0
/**********************************************************
 * capture video frame one by one from the preview window
 * setup the buffer to hold the images
 **********************************************************/
private synchronized void setupVideoFrameCallback() {
    Log.d(TAG, "setupVideoFrameCallback() called on the Camera class");
    if (mCamera == null) {
        Log.e(TAG, "Camera object is null in setupVideoFrameCallback!");
        return;
    }
    mFrameCatcher = new FrameCatcher(mPreviewWidth,
            mPreviewHeight,
            getImageFormat(),
            mVideoPreview);
    long bufferSize;
    bufferSize = mPreviewWidth * mPreviewHeight
            * ImageFormat.getBitsPerPixel(mImageFormat) / 8;
    long sizeWeShouldHave = (mPreviewWidth * mPreviewHeight * 3 / 2);
    mCamera.setPreviewCallbackWithBuffer(null);
    mCamera.setPreviewCallbackWithBuffer(mFrameCatcher);
    for (int i = 0; i < NUM_CAMERA_PREVIEW_BUFFERS; i++) {
        byte[] cameraBuffer = new byte[(int) bufferSize];
        mCamera.addCallbackBuffer(cameraBuffer);
    }
}
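The sizeWeShouldHave variable appears to be a sanity check that the two formulas agree: for NV21, getBitsPerPixel() returns 12, so width * height * 12 / 8 works out to exactly the width * height * 3 / 2 spelled out beside it.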
Example 9
Source File: CameraSource.java From Barcode-Reader with BSD 3-Clause "New" or "Revised" License
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 10
Source File: RxCameraInternal.java From RxCamera with MIT License
private int getPreviewBufferSizeFromParameter() {
    Log.d(TAG, "getPreviewBufferSizeFromParameter, previewFormat: "
            + camera.getParameters().getPreviewFormat() + ", "
            + "previewSize: " + camera.getParameters().getPreviewSize()
            + ", bitsPerPixels: "
            + ImageFormat.getBitsPerPixel(camera.getParameters().getPreviewFormat()));
    if (camera.getParameters().getPreviewFormat() == ImageFormat.YV12) {
        int width = camera.getParameters().getPreviewSize().width;
        int height = camera.getParameters().getPreviewSize().height;
        int yStride = (int) Math.ceil(width / 16.0) * 16;
        int uvStride = (int) Math.ceil((yStride / 2) / 16.0) * 16;
        int ySize = yStride * height;
        int uvSize = uvStride * height / 2;
        int size = ySize + uvSize * 2;
        return size;
    } else {
        return camera.getParameters().getPreviewSize().width
                * camera.getParameters().getPreviewSize().height
                * ImageFormat.getBitsPerPixel(camera.getParameters().getPreviewFormat()) / 8;
    }
}
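The YV12 branch follows the stride alignment the Camera preview documentation requires for that format: the Y stride is the width rounded up to a multiple of 16, and the chroma stride is half of that, again rounded up to 16. Taking a hypothetical 176x144 (QCIF) preview as a worked example: yStride = 176 and uvStride = 96, so the buffer is 176*144 + 2 * (96*144/2) = 25,344 + 13,824 = 39,168 bytes, noticeably more than the unpadded 176*144*12/8 = 38,016 bytes the generic branch would return.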
Example 11
Source File: CameraSource.java From AndroidApp with GNU Affero General Public License v3.0
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 12
Source File: CameraSource.java From trust-wallet-android-source with GNU General Public License v3.0
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based off of
 * the camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    //
    // NOTICE: This code only works when using play services v. 8.1 or higher.
    //

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // I don't think that this will ever happen. But if it does, then we wouldn't be
        // passing the preview content to the underlying detector later.
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 13
Source File: CameraEngine.java From Fatigue-Detection with MIT License
public void openCamera(boolean facingFront) {
    synchronized (this) {
        int facing = facingFront ?
                Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;
        currentCameraId = getCameraIdWithFacing(facing);
        camera = Camera.open(currentCameraId);
        camera.setPreviewCallbackWithBuffer(this);
        initRotateDegree(currentCameraId);
        if (camera != null) {
            mParams = camera.getParameters();
            List<Camera.Size> supportedPictureSizesList = mParams.getSupportedPictureSizes();
            List<Camera.Size> supportedVideoSizesList = mParams.getSupportedVideoSizes();
            List<Camera.Size> supportedPreviewSizesList = mParams.getSupportedPreviewSizes();
            Logger.logCameraSizes(supportedPictureSizesList);
            Logger.logCameraSizes(supportedVideoSizesList);
            Logger.logCameraSizes(supportedPreviewSizesList);

            previewSize = choosePreferredSize(supportedPreviewSizesList, preferredRatio);
            Camera.Size photoSize = choosePreferredSize(supportedPictureSizesList, preferredRatio);
            frameHeight = previewSize.width;
            frameWidth = previewSize.height;
            Log.d(TAG, "openCamera: choose preview size" + previewSize.height + "x" + previewSize.width);
            mParams.setPreviewSize(frameHeight, frameWidth);
            mParams.setPictureSize(photoSize.width, photoSize.height);
            Log.d(TAG, "openCamera: choose photo size" + photoSize.height + "x" + photoSize.width);
            //mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);

            int size = frameWidth * frameHeight;
            size = size * ImageFormat.getBitsPerPixel(mParams.getPreviewFormat()) / 8;
            if (mBuffer == null || mBuffer.length != size)
                mBuffer = new byte[size];
            mFrameChain[0].init(size);
            mFrameChain[1].init(size);
            camera.addCallbackBuffer(mBuffer);
            camera.setParameters(mParams);
            cameraOpened = true;
        }
    }
}
Example 14
Source File: CameraSource.java From flutter_barcode_scanner with MIT License
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        throw new IllegalStateException("Failed to create valid buffer for camera source.");
    }
    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
Example 15
Source File: CameraEngine.java From In77Camera with MIT License
public void openCamera(boolean facingFront) {
    synchronized (this) {
        int facing = facingFront ?
                Camera.CameraInfo.CAMERA_FACING_FRONT : Camera.CameraInfo.CAMERA_FACING_BACK;
        currentCameraId = getCameraIdWithFacing(facing);
        camera = Camera.open(currentCameraId);
        camera.setPreviewCallbackWithBuffer(this);
        initRotateDegree(currentCameraId);
        if (camera != null) {
            mParams = camera.getParameters();
            List<Camera.Size> supportedPictureSizesList = mParams.getSupportedPictureSizes();
            List<Camera.Size> supportedVideoSizesList = mParams.getSupportedVideoSizes();
            List<Camera.Size> supportedPreviewSizesList = mParams.getSupportedPreviewSizes();
            Logger.logCameraSizes(supportedPictureSizesList);
            Logger.logCameraSizes(supportedVideoSizesList);
            Logger.logCameraSizes(supportedPreviewSizesList);

            previewSize = choosePreferredSize(supportedPreviewSizesList, preferredRatio);
            Camera.Size photoSize = choosePreferredSize(supportedPictureSizesList, preferredRatio);
            frameHeight = previewSize.width;
            frameWidth = previewSize.height;
            Log.d(TAG, "openCamera: choose preview size" + previewSize.height + "x" + previewSize.width);
            mParams.setPreviewSize(frameHeight, frameWidth);
            mParams.setPictureSize(photoSize.width, photoSize.height);
            Log.d(TAG, "openCamera: choose photo size" + photoSize.height + "x" + photoSize.width);
            //mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);

            int size = frameWidth * frameHeight;
            size = size * ImageFormat.getBitsPerPixel(mParams.getPreviewFormat()) / 8;
            if (mBuffer == null || mBuffer.length != size)
                mBuffer = new byte[size];
            mFrameChain[0].init(size);
            mFrameChain[1].init(size);
            camera.addCallbackBuffer(mBuffer);
            camera.setParameters(mParams);
            cameraOpened = true;
        }
    }
}
Example 16
Source File: ExtVideoCapture.java From PLDroidMediaStreaming with Apache License 2.0
@Override
public void surfaceCreated(SurfaceHolder holder) {
    if (safeCameraOpen(mCurrentFacingId)) {
        try {
            mCamera.setPreviewDisplay(holder);

            int degree = getDeviceRotationDegree(mContext);
            Camera.CameraInfo camInfo = new Camera.CameraInfo();
            Camera.getCameraInfo(mCurrentFacingId, camInfo);
            int orientation;
            if (mCurrentFacingId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                orientation = (camInfo.orientation + degree) % 360;
                mCameraPreviewDegree = orientation;
                orientation = (360 - orientation) % 360; // compensate the mirror
            } else { // back-facing
                orientation = (camInfo.orientation - degree + 360) % 360;
                mCameraPreviewDegree = orientation;
            }
            mCamera.setDisplayOrientation(orientation);

            Camera.Parameters params = mCamera.getParameters();
            params.setPreviewFormat(ImageFormat.NV21);
            final Camera.Size previewSize = params.getPreviewSize();
            final int bitsPerPixel = ImageFormat.getBitsPerPixel(params.getPreviewFormat());
            final int previewBufferSize = (previewSize.width * previewSize.height * bitsPerPixel) / 8;
            for (int i = 0; i < MAX_CALLBACK_BUFFER_NUM; i++) {
                mCamera.addCallbackBuffer(new byte[previewBufferSize]);
            }
            mPreviewWidth = previewSize.width;
            mPreviewHeight = previewSize.height;
            mCamera.setPreviewCallbackWithBuffer(this);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
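Because several callback buffers are queued here, the per-buffer size matters: with NV21 at 12 bits per pixel, a hypothetical 1920x1080 preview needs 1920 * 1080 * 12 / 8 = 3,110,400 bytes per buffer, so MAX_CALLBACK_BUFFER_NUM buffers cost roughly 3 MB each.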
Example 17
Source File: CameraPreviewView.java From rosjava_android_template with Apache License 2.0
private void setupBufferingPreviewCallback() {
    int format = camera.getParameters().getPreviewFormat();
    int bits_per_pixel = ImageFormat.getBitsPerPixel(format);
    previewBuffer = new byte[previewSize.height * previewSize.width * bits_per_pixel / 8];
    camera.addCallbackBuffer(previewBuffer);
    camera.setPreviewCallbackWithBuffer(bufferingPreviewCallback);
}
Example 18
Source File: CameraEnumerationAndroid.java From webrtc_android with MIT License
public static int frameSize(int width, int height, int imageFormat) {
    if (imageFormat != ImageFormat.NV21) {
        throw new UnsupportedOperationException("Don't know how to calculate "
                + "the frame size of non-NV21 image formats.");
    }
    return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
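As a quick sanity check of the helper: frameSize(640, 480, ImageFormat.NV21) evaluates to 640 * 480 * 12 / 8 = 460,800 bytes, while any other format throws UnsupportedOperationException.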
Example 19
Source File: CameraStreamer.java From peepers with Apache License 2.0
private void startStreamingIfRunning() throws IOException {
    // Throws RuntimeException if the camera is currently opened
    // by another application.
    final Camera camera = Camera.open(mCameraIndex);
    final Camera.Parameters params = camera.getParameters();

    final List<Camera.Size> supportedPreviewSizes = params.getSupportedPreviewSizes();
    final Camera.Size selectedPreviewSize = supportedPreviewSizes.get(mPreviewSizeIndex);
    params.setPreviewSize(selectedPreviewSize.width, selectedPreviewSize.height);

    if (mUseFlashLight) {
        params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
    } // if

    // Set Preview FPS range. The range with the greatest maximum
    // is returned first.
    final List<int[]> supportedPreviewFpsRanges = params.getSupportedPreviewFpsRange();
    // XXX: However sometimes it returns null. This is a known bug
    // https://code.google.com/p/android/issues/detail?id=6271
    // In which case, we just don't set it.
    if (supportedPreviewFpsRanges != null) {
        final int[] range = supportedPreviewFpsRanges.get(0);
        params.setPreviewFpsRange(range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        camera.setParameters(params);
    } // if

    // Set up preview callback
    mPreviewFormat = params.getPreviewFormat();
    final Camera.Size previewSize = params.getPreviewSize();
    mPreviewWidth = previewSize.width;
    mPreviewHeight = previewSize.height;
    final int BITS_PER_BYTE = 8;
    final int bytesPerPixel = ImageFormat.getBitsPerPixel(mPreviewFormat) / BITS_PER_BYTE;
    // XXX: According to the documentation the buffer size can be
    // calculated by width * height * bytesPerPixel. However, this
    // returned an error saying it was too small. It always needed
    // to be exactly 1.5 times larger.
    mPreviewBufferSize = mPreviewWidth * mPreviewHeight * bytesPerPixel * 3 / 2 + 1;
    camera.addCallbackBuffer(new byte[mPreviewBufferSize]);
    mPreviewRect = new Rect(0, 0, mPreviewWidth, mPreviewHeight);
    camera.setPreviewCallbackWithBuffer(mPreviewCallback);

    // We assumed that the compressed image will be no bigger than
    // the uncompressed image.
    mJpegOutputStream = new MemoryOutputStream(mPreviewBufferSize);
    final MJpegHttpStreamer streamer = new MJpegHttpStreamer(mPort, mPreviewBufferSize);
    streamer.start();

    synchronized (mLock) {
        if (!mRunning) {
            streamer.stop();
            camera.release();
            return;
        } // if

        try {
            camera.setPreviewDisplay(mPreviewDisplay);
        } // try
        catch (final IOException e) {
            streamer.stop();
            camera.release();
            throw e;
        } // catch

        mMJpegHttpStreamer = streamer;
        camera.startPreview();
        mCamera = camera;
    } // synchronized
}
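The "exactly 1.5 times larger" puzzle in the XXX comment has a simple arithmetic explanation: for NV21, getBitsPerPixel() returns 12, and the integer division 12 / BITS_PER_BYTE truncates bytesPerPixel to 1, so width * height * bytesPerPixel undercounts by precisely the 3/2 factor the author then adds back. A sketch that avoids the truncation, assuming the same fields as the example above, would keep the calculation in bits and divide once at the end:

// Hypothetical alternative: multiply in bits first so the fractional
// 1.5 bytes per pixel of NV21 is not lost to integer truncation.
final int bitsPerPixel = ImageFormat.getBitsPerPixel(mPreviewFormat); // 12 for NV21
mPreviewBufferSize = (mPreviewWidth * mPreviewHeight * bitsPerPixel) / 8;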
Example 20
Source File: ImageUtils.java From FastBarcodeScanner with Apache License 2.0
/**
 * Takes an Android Image in the YUV_420_888 format and returns its tightly packed pixel data
 * (the OpenCV Mat conversion is left commented out at the end).
 *
 * @param image Image in the YUV_420_888 format.
 * @return the image pixels as a byte array.
 */
public static byte[] imageToMat(Image image) {
    ByteBuffer buffer;
    int rowStride;
    int pixelStride;
    int pixelWidth = image.getWidth();
    int pixelHeight = image.getHeight();
    int encodedRowStart = 0;

    Image.Plane[] planes = image.getPlanes();
    int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
    byte[] pixels = new byte[image.getWidth() * image.getHeight() * bytesPerPixel];
    byte[] rowData = new byte[planes[0].getRowStride()];

    for (int i = 0; i < planes.length; i++) {
        buffer = planes[i].getBuffer();
        rowStride = planes[i].getRowStride();
        pixelStride = planes[i].getPixelStride();

        int encodedWidthInPixels = (i == 0) ? pixelWidth : pixelWidth / 2;
        int encodedHeightInPixels = (i == 0) ? pixelHeight : pixelHeight / 2;

        for (int row = 0; row < encodedHeightInPixels; row++) {
            if (pixelStride == bytesPerPixel) {
                int encodedWidthInBytes = encodedWidthInPixels * bytesPerPixel;
                buffer.get(pixels, encodedRowStart, encodedWidthInBytes);

                // Advance buffer the remainder of the row stride, unless on the last row.
                // Otherwise, this will throw an IllegalArgumentException because the buffer
                // doesn't include the last padding.
                if (encodedHeightInPixels - row != 1) {
                    int padding = rowStride - encodedWidthInBytes;
                    buffer.position(buffer.position() + padding);
                }
                encodedRowStart += encodedWidthInBytes;
            } else {
                // On the last row only read the width of the image minus the pixel stride
                // plus one. Otherwise, this will throw a BufferUnderflowException because the
                // buffer doesn't include the last padding.
                if (encodedHeightInPixels - row == 1) {
                    buffer.get(rowData, 0, pixelWidth - pixelStride + 1);
                } else {
                    buffer.get(rowData, 0, rowStride);
                }

                for (int col = 0; col < encodedWidthInPixels; col++) {
                    pixels[encodedRowStart++] = rowData[col * pixelStride];
                }
            }
        }
    }

    // Finally, create the Mat.
    //Mat mat = new Mat(pixelHeight + pixelHeight / 2, pixelWidth, CvType.CV_8UC1);
    //mat.put(0, 0, pixels);

    return pixels;
}
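One detail worth checking when reusing this, under the assumption that getBitsPerPixel(ImageFormat.YUV_420_888) returns 12 (as on current API levels): the integer division by 8 makes bytesPerPixel equal to 1, so the pixels array is sized width * height even though the three planes together hold width * height * 3 / 2 samples. A more defensive allocation, reusing the image parameter from the example above, computes the size in bits first:

// Hypothetical sizing that keeps the 1.5 bytes per pixel of YUV 4:2:0 intact.
int sizeInBits = image.getWidth() * image.getHeight()
        * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888);
byte[] pixels = new byte[sizeInBits / 8];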