Java Code Examples for android.opengl.GLES20#glReadPixels()
The following examples show how to use android.opengl.GLES20#glReadPixels().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: SurfaceDrawable.java From libcommon with Apache License 2.0 | 7 votes |
/**
 * Worker-thread draw handler: makes the EGL context current, latches the latest
 * SurfaceTexture frame, renders it offscreen, then reads the pixels back into
 * the shared Bitmap.
 */
@WorkerThread
protected void handleDraw() {
	if (DEBUG && ((++drawCnt % 100) == 0)) Log.v(TAG, "handleDraw:" + drawCnt);
	// Drop any queued draw request; we are servicing it now.
	mEglTask.removeRequest(REQUEST_DRAW);
	try {
		mEglTask.makeCurrent();
		mInputTexture.updateTexImage();
		mInputTexture.getTransformMatrix(mTexMatrix);
	} catch (final Exception e) {
		// updateTexImage can throw if the producer side died; bail out of this frame.
		Log.e(TAG, "handleDraw:thread id =" + Thread.currentThread().getId(), e);
		return;
	}
	// Draw the OES texture onto the offscreen (master) surface.
	mDrawer.draw(mTexId, mTexMatrix, 0);
	// Read pixels back from the offscreen surface.
	mWorkBuffer.clear();
	GLES20.glReadPixels(0, 0, mImageWidth, mImageHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mWorkBuffer);
	// Copy the read-back pixels into the Bitmap (synchronized: the Bitmap is shared
	// with the drawing side).
	mWorkBuffer.clear();
	synchronized (mBitmap) {
		mBitmap.copyPixelsFromBuffer(mWorkBuffer);
	}
	invalidateSelf();
}
Example 2
Source File: BitmapUtils.java From In77Camera with MIT License | 7 votes |
public static void sendImage(int width, int height, Context context, FileUtils.FileSavedCallback fileSavedCallback) { final IntBuffer pixelBuffer = IntBuffer.allocate(width * height); //about 20-50ms long start = System.nanoTime(); GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer); long end = System.nanoTime(); Log.d(TAG, "glReadPixels time: " + (end - start)/1000000+" ms"); //about 700-4000ms(png) 200-1000ms(jpeg) //use jpeg instead of png to save time //it will consume large memory and may take a long time, depends on the phone new SaveBitmapTask(pixelBuffer,width,height,context,fileSavedCallback).execute(); }
Example 3
Source File: OffscreenImage.java From EZFilter with MIT License | 7 votes |
public Bitmap capture(int width, int height) { mPipeline.onSurfaceChanged(null, width, height); mPipeline.startRender(); mPipeline.onDrawFrame(null); int[] iat = new int[mWidth * mHeight]; IntBuffer ib = IntBuffer.allocate(width * height); GLES20.glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, ib); int[] ia = ib.array(); for (int i = 0; i < mHeight; i++) { System.arraycopy(ia, i * mWidth, iat, (mHeight - i - 1) * mWidth, mWidth); } Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); bitmap.copyPixelsFromBuffer(IntBuffer.wrap(iat)); mPipeline.onSurfaceDestroyed(); // 释放EGL环境 mInputSurface.release(); mEgl.release(); return bitmap; }
Example 4
Source File: BitmapUtils.java From Fatigue-Detection with MIT License | 6 votes |
/**
 * Reads the current GL framebuffer into an IntBuffer and saves it to storage
 * asynchronously via SaveBitmapTask, notifying the callback when the file is
 * written. Must run on the GL thread.
 */
public static void sendImage(int width, int height, Context context, FileUtils.FileSavedCallback fileSavedCallback) {
    final IntBuffer pixelBuffer = IntBuffer.allocate(width * height);
    //about 20-50ms
    long start = System.nanoTime();
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
    long end = System.nanoTime();
    Log.d(TAG, "glReadPixels time: " + (end - start)/1000000+" ms");
    //about 700-4000ms(png) 200-1000ms(jpeg)
    //use jpeg instead of png to save time
    //it will consume large memory and may take a long time, depends on the phone
    new SaveBitmapTask(pixelBuffer,width,height,context,fileSavedCallback).execute();
}
Example 5
Source File: BitmapUtils.java From Fatigue-Detection with MIT License | 5 votes |
/**
 * Captures the current GL framebuffer as an ARGB_8888 Bitmap, flipping it
 * vertically to compensate for GL's bottom-up row order.
 *
 * FIX: the original built {@code pixelMirroredArray} but never copied it into
 * the Bitmap, so it always returned a blank image.
 *
 * @param width  framebuffer width in pixels
 * @param height framebuffer height in pixels
 * @return the captured, vertically mirrored screenshot
 */
public static Bitmap getScreenShot(int width, int height){
    IntBuffer pixelBuffer = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
    int[] pixelMirroredArray = new int[width * height];
    int[] pixelArray = pixelBuffer.array();
    // GL rows are bottom-up; mirror row i into row (height-1-i).
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            pixelMirroredArray[(height - i - 1) * width + j] = pixelArray[i * width + j];
        }
    }
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    // copyPixelsFromBuffer expects the RGBA byte order glReadPixels produced.
    bmp.copyPixelsFromBuffer(IntBuffer.wrap(pixelMirroredArray));
    return bmp;
}
Example 6
Source File: BitmapUtils.java From In77Camera with MIT License | 5 votes |
/**
 * Captures the current GL framebuffer as an ARGB_8888 Bitmap, flipping it
 * vertically to compensate for GL's bottom-up row order.
 *
 * FIX: the original built {@code pixelMirroredArray} but never copied it into
 * the Bitmap, so it always returned a blank image.
 *
 * @param width  framebuffer width in pixels
 * @param height framebuffer height in pixels
 * @return the captured, vertically mirrored screenshot
 */
public static Bitmap getScreenShot(int width, int height){
    IntBuffer pixelBuffer = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
    int[] pixelMirroredArray = new int[width * height];
    int[] pixelArray = pixelBuffer.array();
    // GL rows are bottom-up; mirror row i into row (height-1-i).
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            pixelMirroredArray[(height - i - 1) * width + j] = pixelArray[i * width + j];
        }
    }
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    // copyPixelsFromBuffer expects the RGBA byte order glReadPixels produced.
    bmp.copyPixelsFromBuffer(IntBuffer.wrap(pixelMirroredArray));
    return bmp;
}
Example 7
Source File: ScreenGrabber.java From 30-android-libraries-in-30-days with Apache License 2.0 | 5 votes |
/**
 * Grabs a rectangular region of the GL framebuffer and returns it as an
 * upright ARGB_8888 Bitmap. Reads from y == 0 up through the requested region
 * (see the workaround note below), then converts the GL pixel layout to
 * Bitmap's ARGB while flipping vertically.
 *
 * @param pGrabX      left edge of the region, in framebuffer pixels
 * @param pGrabY      bottom offset of the region, in framebuffer pixels
 * @param pGrabWidth  region width in pixels
 * @param pGrabHeight region height in pixels
 */
private static Bitmap grab(final int pGrabX, final int pGrabY, final int pGrabWidth, final int pGrabHeight) {
    // Oversized: holds all rows from y == 0 up to pGrabY + pGrabHeight, because
    // of the y != 0 workaround below.
    final int[] source = new int[pGrabWidth * (pGrabY + pGrabHeight)];
    final IntBuffer sourceBuffer = IntBuffer.wrap(source);
    sourceBuffer.position(0);
    // TODO Check availability of OpenGL and GLES20.GL_RGBA combinations that require less conversion operations.
    // Note: There is (said to be) a bug with glReadPixels when 'y != 0', so we simply read starting from 'y == 0'.
    // TODO Does that bug still exist?
    GLES20.glReadPixels(pGrabX, 0, pGrabWidth, pGrabY + pGrabHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, sourceBuffer);
    final int[] pixels = new int[pGrabWidth * pGrabHeight];
    // Convert from RGBA_8888 (Which is actually ABGR as the whole buffer seems to be inverted) --> ARGB_8888
    for (int y = 0; y < pGrabHeight; y++) {
        for (int x = 0; x < pGrabWidth; x++) {
            // Skip the first pGrabY rows that were only read due to the workaround.
            final int pixel = source[x + ((pGrabY + y) * pGrabWidth)];
            // Swap the red and blue channels; green and alpha stay in place.
            final int blue = (pixel & 0x00FF0000) >> 16;
            final int red = (pixel & 0x000000FF) << 16;
            final int greenAlpha = pixel & 0xFF00FF00;
            // Write into the mirrored row to undo GL's bottom-up ordering.
            pixels[x + ((pGrabHeight - y - 1) * pGrabWidth)] = greenAlpha | red | blue;
        }
    }
    return Bitmap.createBitmap(pixels, pGrabWidth, pGrabHeight, Config.ARGB_8888);
}
Example 8
Source File: ExtractMpegFramesTest.java From Android-MediaCodec-Examples with Apache License 2.0 | 4 votes |
/**
 * Saves the current frame to disk as a PNG image.
 *
 * @param filename destination path for the PNG file
 * @throws IOException if the file cannot be written
 */
public void saveFrame(String filename) throws IOException {
    // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
    // data (i.e. a byte of red, followed by a byte of green...).  To use the Bitmap
    // constructor that takes an int[] array with pixel data, we need an int[] filled
    // with little-endian ARGB data.
    //
    // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
    // copying data around for a 720p frame.  It's better to do a bulk get() and then
    // rearrange the data in memory.  (For comparison, the PNG compress takes about 500ms
    // for a trivial frame.)
    //
    // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
    // get() into a straight memcpy on most Android devices.  Our ints will hold ABGR data.
    // Swapping B and R gives us ARGB.  We need about 30ms for the bulk get(), and another
    // 270ms for the color swap.
    //
    // We can avoid the costly B/R swap here if we do it in the fragment shader (see
    // http://stackoverflow.com/questions/21634450/ ).
    //
    // Having said all that... it turns out that the Bitmap#copyPixelsFromBuffer()
    // method wants RGBA pixels, not ARGB, so if we create an empty bitmap and then
    // copy pixel data in we can avoid the swap issue entirely, and just copy straight
    // into the Bitmap from the ByteBuffer.
    //
    // Making this even more interesting is the upside-down nature of GL, which means
    // our output will look upside-down relative to what appears on screen if the
    // typical GL conventions are used.  (For ExtractMpegFrameTest, we avoid the issue
    // by inverting the frame when we render it.)
    //
    // Allocating large buffers is expensive, so we really want mPixelBuf to be
    // allocated ahead of time if possible.  We still get some allocations from the
    // Bitmap / PNG creation.
    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
    BufferedOutputStream bos = null;
    try {
        bos = new BufferedOutputStream(new FileOutputStream(filename));
        Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
        mPixelBuf.rewind();
        bmp.copyPixelsFromBuffer(mPixelBuf);
        bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
        bmp.recycle();
    } finally {
        if (bos != null) bos.close();
    }
    if (VERBOSE) {
        Log.d(TAG, "Saved " + mWidth + "x" + mHeight + " frame as '" + filename + "'");
    }
}
Example 9
Source File: OutputSurface.java From TelePlus-Android with GNU General Public License v2.0 | 4 votes |
/**
 * Reads the current frame's RGBA pixels from the GL framebuffer into the
 * pre-allocated {@code mPixelBuf} and returns that buffer. The buffer is
 * reused across calls, so consume its contents before the next frame.
 */
public ByteBuffer getFrame() {
    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
    return mPixelBuf;
}
Example 10
Source File: OutputSurface.java From deltachat-android with GNU General Public License v3.0 | 4 votes |
/**
 * Reads the current frame's RGBA pixels from the GL framebuffer into the
 * pre-allocated {@code mPixelBuf} and returns that buffer. The buffer is
 * reused across calls, so consume its contents before the next frame.
 */
public ByteBuffer getFrame() {
    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
    return mPixelBuf;
}
Example 11
Source File: OutputSurface.java From VideoCompressor with Apache License 2.0 | 4 votes |
/**
 * Reads the current frame's RGBA pixels from the GL framebuffer into the
 * pre-allocated {@code mPixelBuf} and returns that buffer. The buffer is
 * reused across calls, so consume its contents before the next frame.
 */
public ByteBuffer getFrame() {
    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
    return mPixelBuf;
}
Example 12
Source File: EglRenderer.java From VideoCRE with MIT License | 4 votes |
/**
 * Renders the frame into an offscreen framebuffer once per registered frame
 * listener (scaled per listener), reads the pixels back, and delivers a Bitmap
 * to each listener. Listeners are one-shot: each is removed before being
 * notified.
 *
 * @param frame       the frame to render for listeners
 * @param yuvTextures texture ids used when the frame is in YUV form
 * @param texMatrix   texture transform for the frame
 * @param wasRendered whether this frame was actually rendered to screen;
 *                    listeners with fps reduction are skipped otherwise
 */
private void notifyCallbacks(
    VideoRenderer.I420Frame frame, int[] yuvTextures, float[] texMatrix, boolean wasRendered) {
  if (frameListeners.isEmpty()) return;
  // Compose the texture matrix with an optional horizontal mirror and a vertical
  // flip (GL's row order is bottom-up relative to Bitmap's).
  final float[] bitmapMatrix = RendererCommon.multiplyMatrices(
      RendererCommon.multiplyMatrices(texMatrix,
          mirror ? RendererCommon.horizontalFlipMatrix() : RendererCommon.identityMatrix()),
      RendererCommon.verticalFlipMatrix());
  Iterator<FrameListenerAndParams> it = frameListeners.iterator();
  while (it.hasNext()) {
    FrameListenerAndParams listenerAndParams = it.next();
    // Listeners with fps reduction only fire for frames that reached the screen.
    if (!wasRendered && listenerAndParams.applyFpsReduction) {
      continue;
    }
    // One-shot delivery: remove before notifying.
    it.remove();
    final int scaledWidth = (int) (listenerAndParams.scale * frame.rotatedWidth());
    final int scaledHeight = (int) (listenerAndParams.scale * frame.rotatedHeight());
    if (scaledWidth == 0 || scaledHeight == 0) {
      listenerAndParams.listener.onFrame(null);
      continue;
    }
    // Lazily create and (re)size the shared offscreen render target.
    if (bitmapTextureFramebuffer == null) {
      bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
    }
    bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
    GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // Draw via the YUV or OES path depending on the frame's storage.
    if (frame.yuvFrame) {
      listenerAndParams.drawer.drawYuv(yuvTextures, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    } else {
      listenerAndParams.drawer.drawOes(frame.textureId, bitmapMatrix, frame.rotatedWidth(),
          frame.rotatedHeight(), 0 /* viewportX */, 0 /* viewportY */, scaledWidth, scaledHeight);
    }
    final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
    GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
    GLES20.glReadPixels(
        0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
    // Restore the default framebuffer before handing off.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
    final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(bitmapBuffer);
    listenerAndParams.listener.onFrame(bitmap);
  }
}
Example 13
Source File: EglSurfaceBase.java From FuAgoraDemoDroid with MIT License | 4 votes |
/**
 * Saves the EGL surface to a file.
 * <p>
 * Expects that this object's EGL surface is current.
 *
 * @param file destination for the PNG-encoded frame
 * @throws IOException if the file cannot be written
 * @throws RuntimeException if this surface is not the current EGL surface
 */
public void saveFrame(File file) throws IOException {
    if (!mEglCore.isCurrent(mEGLSurface)) {
        throw new RuntimeException("Expected EGL context/surface is not current");
    }
    // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
    // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
    // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
    // Bitmap "copy pixels" method wants the same format GL provides.
    //
    // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
    // here often.
    //
    // Making this even more interesting is the upside-down nature of GL, which means
    // our output will look upside down relative to what appears on screen if the
    // typical GL conventions are used.
    String filename = file.toString();
    int width = getWidth();
    int height = getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
    buf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    GlUtil.checkGlError("glReadPixels");
    buf.rewind();
    BufferedOutputStream bos = null;
    try {
        bos = new BufferedOutputStream(new FileOutputStream(filename));
        Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(buf);
        bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
        bmp.recycle();
    } finally {
        if (bos != null) bos.close();
    }
    Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
Example 14
Source File: OpenGlUtils.java From TikTok with Apache License 2.0 | 4 votes |
/**
 * Renders {@code bitmap} through the given GPUImage filter into an offscreen
 * framebuffer and returns the filtered result as a new ARGB_8888 Bitmap.
 * Creates a temporary FBO + texture, draws (optionally rotated 90 degrees),
 * reads the pixels back, then deletes all GL objects it created.
 *
 * @param bitmap        source image to filter
 * @param filter        filter to apply; if null, returns null
 * @param displayWidth  display width the filter is restored to afterwards
 * @param displayHeight display height the filter is restored to afterwards
 * @param rotate        when true, draws with a 90-degree rotation
 * @return the filtered bitmap, or null if {@code filter} is null
 */
public static Bitmap drawToBitmapByFilter(Bitmap bitmap, GPUImageFilter filter,
                                          int displayWidth, int displayHeight, boolean rotate){
    if(filter == null)
        return null;
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    // Create an FBO backed by a texture of the source bitmap's size.
    int[] mFrameBuffers = new int[1];
    int[] mFrameBufferTextures = new int[1];
    GLES20.glGenFramebuffers(1, mFrameBuffers, 0);
    GLES20.glGenTextures(1, mFrameBufferTextures, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[0]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0], 0);
    GLES20.glViewport(0, 0, width, height);
    // Point the filter at the offscreen size for this draw.
    filter.onInputSizeChanged(width, height);
    filter.onDisplaySizeChanged(displayWidth, displayHeight);
    int textureId = OpenGlUtils.loadTexture(bitmap, OpenGlUtils.NO_TEXTURE, true);
    if(rotate){
        // Build vertex/texture coordinate buffers for a 90-degree rotated draw.
        FloatBuffer gLCubeBuffer = ByteBuffer
                .allocateDirect(TextureRotationUtil.CUBE_BAAB.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        gLCubeBuffer.put(TextureRotationUtil.CUBE_BAAB).position(0);
        FloatBuffer gLTextureBuffer = ByteBuffer
                .allocateDirect(TextureRotationUtil.TEXTURE_NO_ROTATION.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        gLTextureBuffer.put(TextureRotationUtil.getRotation(Rotation.ROTATION_90, true, false)).position(0);
        filter.onDrawFrame(textureId, gLCubeBuffer, gLTextureBuffer);
    }else {
        filter.onDrawFrame(textureId);
    }
    // Read the filtered pixels back and wrap them in a Bitmap.
    IntBuffer ib = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
    Bitmap result = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    result.copyPixelsFromBuffer(IntBuffer.wrap(ib.array()));
    // Clean up all GL objects created above.
    GLES20.glDeleteTextures(1, new int[]{textureId}, 0);
    GLES20.glDeleteFramebuffers(1, mFrameBuffers, 0);
    GLES20.glDeleteTextures(1, mFrameBufferTextures, 0);
    // Restore the filter's input size to the display dimensions.
    filter.onInputSizeChanged(displayWidth, displayHeight);
    return result;
}
Example 15
Source File: EglSurfaceBase.java From grafika with Apache License 2.0 | 4 votes |
/**
 * Saves the EGL surface to a file.
 * <p>
 * Expects that this object's EGL surface is current.
 *
 * @param file destination for the PNG-encoded frame
 * @throws IOException if the file cannot be written
 * @throws RuntimeException if this surface is not the current EGL surface
 */
public void saveFrame(File file) throws IOException {
    if (!mEglCore.isCurrent(mEGLSurface)) {
        throw new RuntimeException("Expected EGL context/surface is not current");
    }
    // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
    // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
    // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
    // Bitmap "copy pixels" method wants the same format GL provides.
    //
    // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
    // here often.
    //
    // Making this even more interesting is the upside-down nature of GL, which means
    // our output will look upside down relative to what appears on screen if the
    // typical GL conventions are used.
    String filename = file.toString();
    int width = getWidth();
    int height = getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
    buf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    GlUtil.checkGlError("glReadPixels");
    buf.rewind();
    BufferedOutputStream bos = null;
    try {
        bos = new BufferedOutputStream(new FileOutputStream(filename));
        Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(buf);
        bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
        bmp.recycle();
    } finally {
        if (bos != null) bos.close();
    }
    Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
Example 16
Source File: PhotoFilterView.java From TelePlus-Android with GNU General Public License v2.0 | 4 votes |
/**
 * Runs the "enhance" filter pass. On first use it converts the source texture
 * to HSV, reads it back, computes a CDT (histogram) texture via
 * {@code Utilities.calcCDT}, and caches both as {@code enhanceTextures};
 * subsequent calls skip straight to drawing the enhance shader into
 * {@code renderFrameBuffer[1]} with an intensity of 0 when showing the
 * original image.
 */
private void drawEnhancePass() {
    if (!hsvGenerated) {
        // Pass 1: render the source texture through the RGB->HSV shader into
        // renderTexture[0] via the first framebuffer.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[0]);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[0], 0);
        GLES20.glClear(0);
        GLES20.glUseProgram(rgbToHsvShaderProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[1]);
        GLES20.glUniform1i(rgbToHsvSourceImageHandle, 0);
        GLES20.glEnableVertexAttribArray(rgbToHsvInputTexCoordHandle);
        GLES20.glVertexAttribPointer(rgbToHsvInputTexCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
        GLES20.glEnableVertexAttribArray(rgbToHsvPositionHandle);
        GLES20.glVertexAttribPointer(rgbToHsvPositionHandle, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        // Read the HSV result back to the CPU for histogram computation.
        ByteBuffer hsvBuffer = ByteBuffer.allocateDirect(renderBufferWidth * renderBufferHeight * 4);
        GLES20.glReadPixels(0, 0, renderBufferWidth, renderBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, hsvBuffer);
        // Upload the HSV data as enhanceTextures[0].
        GLES20.glBindTexture(GL10.GL_TEXTURE_2D, enhanceTextures[0]);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, renderBufferWidth, renderBufferHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, hsvBuffer);
        // Compute the enhance CDT into a 256x16 lookup texture (enhanceTextures[1]).
        ByteBuffer buffer = null;
        try {
            buffer = ByteBuffer.allocateDirect(PGPhotoEnhanceSegments * PGPhotoEnhanceSegments * PGPhotoEnhanceHistogramBins * 4);
            Utilities.calcCDT(hsvBuffer, renderBufferWidth, renderBufferHeight, buffer);
        } catch (Exception e) {
            FileLog.e(e);
        }
        GLES20.glBindTexture(GL10.GL_TEXTURE_2D, enhanceTextures[1]);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256, 16, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
        hsvGenerated = true;
    }
    // Pass 2: draw the enhance shader into renderTexture[1] via the second
    // framebuffer, sampling the cached HSV and CDT textures.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[1]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[1], 0);
    GLES20.glClear(0);
    GLES20.glUseProgram(enhanceShaderProgram);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, enhanceTextures[0]);
    GLES20.glUniform1i(enhanceSourceImageHandle, 0);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, enhanceTextures[1]);
    GLES20.glUniform1i(enhanceInputImageTexture2Handle, 1);
    // Intensity 0 shows the unmodified image; otherwise use the user's setting.
    if (showOriginal) {
        GLES20.glUniform1f(enhanceIntensityHandle, 0);
    } else {
        GLES20.glUniform1f(enhanceIntensityHandle, getEnhanceValue());
    }
    GLES20.glEnableVertexAttribArray(enhanceInputTexCoordHandle);
    GLES20.glVertexAttribPointer(enhanceInputTexCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
    GLES20.glEnableVertexAttribArray(enhancePositionHandle);
    GLES20.glVertexAttribPointer(enhancePositionHandle, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
Example 17
Source File: OutputSurface.java From react-native-video-helper with MIT License | 4 votes |
/**
 * Reads the current frame's RGBA pixels from the GL framebuffer into the
 * pre-allocated {@code mPixelBuf} and returns that buffer. The buffer is
 * reused across calls, so consume its contents before the next frame.
 */
public ByteBuffer getFrame() {
    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
    return mPixelBuf;
}
Example 18
Source File: BaseSurface.java From DeviceConnect-Android with MIT License | 2 votes |
/**
 * Stores the framebuffer's pixel data into the given buffer.
 * (Translated from the original Japanese javadoc.)
 *
 * @param buffer buffer that receives the RGBA pixel data
 * @param w width in pixels
 * @param h height in pixels
 */
public void readPixelBuffer(Buffer buffer, int w, int h) {
    GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
}
Example 19
Source File: BitmapOutput.java From UltimateAndroid with Apache License 2.0 | votes |
/**
 * Draws the frame into this output's FBO, reads the pixels back, converts
 * the GL RGBA ints to Bitmap ARGB by swapping red and blue, and delivers the
 * resulting Bitmap to the callback. Lazily creates the FBO once the output
 * size is known; returns without drawing if the size is still 0.
 */
@Override
public void drawFrame() {
    if(frameBuffer == null) {
        if(getWidth() != 0 && getHeight() != 0) {
            initFBO();
        } else {
            // Size not known yet; nothing to render into.
            return;
        }
    }
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
    super.drawFrame();
    int[] pixels = new int[getWidth()*getHeight()];
    IntBuffer intBuffer = IntBuffer.wrap(pixels);
    intBuffer.position(0);
    GLES20.glReadPixels(0, 0, getWidth(), getHeight(), GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, intBuffer);
    // Restore the default framebuffer before post-processing.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    for(int i = 0; i < pixels.length; i++) {
        pixels[i] = (pixels[i] & (0xFF00FF00)) | ((pixels[i] >> 16) & 0x000000FF) | ((pixels[i] << 16) & 0x00FF0000); //swap red and blue to translate back to bitmap rgb style
    }
    Bitmap image = Bitmap.createBitmap(pixels, getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
    callback.bitmapCreated(image);
}
Example 20
Source File: BitmapOutput.java From AndroidFastImageProcessing with MIT License | votes |
/**
 * Draws the frame into this output's FBO, reads the pixels back, converts
 * the GL RGBA ints to Bitmap ARGB by swapping red and blue, and delivers the
 * resulting Bitmap to the callback. Lazily creates the FBO once the output
 * size is known; returns without drawing if the size is still 0.
 */
@Override
public void drawFrame() {
    if(frameBuffer == null) {
        if(getWidth() != 0 && getHeight() != 0) {
            initFBO();
        } else {
            // Size not known yet; nothing to render into.
            return;
        }
    }
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
    super.drawFrame();
    int[] pixels = new int[getWidth()*getHeight()];
    IntBuffer intBuffer = IntBuffer.wrap(pixels);
    intBuffer.position(0);
    GLES20.glReadPixels(0, 0, getWidth(), getHeight(), GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, intBuffer);
    // Restore the default framebuffer before post-processing.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    for(int i = 0; i < pixels.length; i++) {
        pixels[i] = (pixels[i] & (0xFF00FF00)) | ((pixels[i] >> 16) & 0x000000FF) | ((pixels[i] << 16) & 0x00FF0000); //swap red and blue to translate back to bitmap rgb style
    }
    Bitmap image = Bitmap.createBitmap(pixels, getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
    callback.bitmapCreated(image);
}