Java Code Examples for android.opengl.GLES20#glEnable()
The following examples show how to use
android.opengl.GLES20#glEnable().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: PanoRender.java From Pano360 with MIT License | 6 votes |
@Override
public void onDrawFrame(GL10 glUnused) {
    // Clear both color and depth to opaque black before rendering the panorama.
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    // Cull back faces; this renderer treats clockwise winding as front-facing.
    GLES20.glFrontFace(GLES20.GL_CW);
    GLES20.glCullFace(GLES20.GL_BACK);
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    if(!imageMode){
        // Video mode: pull the latest decoded frame into the OES texture and
        // refresh the SurfaceTexture transform matrix used by the first filter.
        panoMediaPlayerWrapper.doTextureUpdate(((OESFilter)firstPassFilter).getSTMatrix());
    }
    // Run the filter chain into the offscreen FBO, then draw that texture to screen.
    filterGroup.drawToFBO(0,fbo);
    if(fbo!=null)
        screenDrawer.onDrawFrame(fbo.getFrameBufferTextureId());
    if (saveImg){
        // One-shot screenshot request: read back the current framebuffer, then reset the flag.
        BitmapUtils.sendImage(surfaceWidth, surfaceHeight,statusHelper.getContext());
        saveImg=false;
    }
    // Restore default cull state so later passes are unaffected.
    GLES20.glDisable(GLES20.GL_CULL_FACE);
    //GLES20.glFinish();
}
Example 2
Source File: TreasureHuntActivity.java From PanoramaGL with Apache License 2.0 | 6 votes |
/**
 * Draws a frame for an eye.
 *
 * @param eye The eye to render. Includes all required transformations.
 */
@Override
public void onDrawEye(Eye eye) {
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    checkGLError("colorParam");

    // Apply the eye transformation to the camera.
    Matrix.multiplyMM(view, 0, eye.getEyeView(), 0, camera, 0);

    // Set the position of the light (transformed into eye space).
    Matrix.multiplyMV(lightPosInEyeSpace, 0, view, 0, LIGHT_POS_IN_WORLD_SPACE, 0);

    // Build the ModelView and ModelViewProjection matrices
    // for calculating cube position and light.
    float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR);
    Matrix.multiplyMM(modelView, 0, view, 0, modelCube, 0);
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
    drawCube();

    // Set modelView for the floor, so we draw floor in the correct location.
    // Note: modelView/modelViewProjection are reused (overwritten) for the floor.
    Matrix.multiplyMM(modelView, 0, view, 0, modelFloor, 0);
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
    drawFloor();
}
Example 3
Source File: ModelRenderer.java From react-native-3d-model-view with MIT License | 6 votes |
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    // Set the background frame color from the hosting activity's configuration.
    float[] backgroundColor = main.getModelActivity().getBackgroundColor();
    GLES20.glClearColor(backgroundColor[0], backgroundColor[1], backgroundColor[2], backgroundColor[3]);

    // Use culling to remove back faces.
    // Don't remove back faces so we can see them
    // GLES20.glEnable(GLES20.GL_CULL_FACE);

    // Enable depth testing for hidden-surface elimination.
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // Enable blending for combining colors when there is transparency
    // (premultiplied-alpha blend function).
    GLES20.glEnable(GLES20.GL_BLEND);
    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);

    // Lets create our 3D world components
    camera = new Camera();

    // This component will draw the actual models using OpenGL
    drawer = new Object3DBuilder();
}
Example 4
Source File: ContinuousCaptureActivity.java From grafika with Apache License 2.0 | 6 votes |
/** * Adds a bit of extra stuff to the display just to give it flavor. */ private static void drawExtra(int frameNum, int width, int height) { // We "draw" with the scissor rect and clear calls. Note this uses window coordinates. int val = frameNum % 3; switch (val) { case 0: GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f); break; case 1: GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f); break; case 2: GLES20.glClearColor(0.0f, 0.0f, 1.0f, 1.0f); break; } int xpos = (int) (width * ((frameNum % 100) / 100.0f)); GLES20.glEnable(GLES20.GL_SCISSOR_TEST); GLES20.glScissor(xpos, 0, width / 32, height / 32); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); GLES20.glDisable(GLES20.GL_SCISSOR_TEST); }
Example 5
Source File: WaterMarkFilter.java From SimpleVideoEditor with Apache License 2.0 | 5 votes |
@Override
protected void onPreDraw() {
    super.onPreDraw();
    // set view port to focus water mark.
    GLES20.glViewport(
            mWatermarkPosX,
            mWatermarkPosY,
            (int) (mWatermarkWidth * mScaleFactor),
            (int) (mWatermarkHeight * mScaleFactor)
    );
    // Depth test off so the watermark always draws over the video frame.
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glEnable(GLES20.GL_BLEND);
    // NOTE(review): GL_SRC_COLOR/GL_DST_ALPHA is an unusual blend pair
    // (not standard alpha blending) — presumably intentional for this
    // watermark look; verify against the desired visual result.
    GLES20.glBlendFunc(GLES20.GL_SRC_COLOR, GLES20.GL_DST_ALPHA);
}
Example 6
Source File: GradientRect.java From GLEXP-Team-onebillion with Apache License 2.0 | 5 votes |
// Draws a rectangle (l,t,r,b) filled with a vertical gradient from col1 to col2,
// blended over the current framebuffer with premultiplied alpha.
public void draw(OBRenderer renderer, float l, float t, float r, float b,float col1[],float col2[]) {
    // Pack interleaved position + RGB data into the shared vertex array.
    fillOutRectVertexData(vertices,l,t,r,b,POSITION_COMPONENT_COUNT + RGB_COMPONENT_COUNT);
    fillOutRectColourData(vertices,col1,col2,POSITION_COMPONENT_COUNT + RGB_COMPONENT_COUNT);
    // Lazily create the VertexArray on first use, otherwise just refill it.
    if (vertexArray == null)
        vertexArray = new VertexArray(vertices);
    else
        vertexArray.put(vertices);
    bindData((ColorShaderProgram) renderer.colourProgram);
    // Unbind any texture — this pass is pure vertex color.
    glBindTexture(GL_TEXTURE_2D,0);
    // Premultiplied-alpha blending.
    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    // GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glEnable(GLES20.GL_BLEND);
    glDrawArrays(GL_TRIANGLE_STRIP,0,4);
}
Example 7
Source File: RenderFeature2.java From geoar-app with Apache License 2.0 | 5 votes |
public void render(float[] mvpMatrix) {
    /** sets the program object as part of current rendering state */
    // Bail out until initialization has completed on the GL thread.
    if (!isInitialized)
        return;
    renderer.useProgram();

    // Per-feature render state flags.
    if(enableBlending){
        GLES20.glEnable(GLES20.GL_BLEND);
    }
    if(enableCullFace){
        GLES20.glEnable(GLES20.GL_CULL_FACE);
    }

    if (texture != null) {
        // Set the active texture unit to texture unit 0.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        // Tell the sampler uniform to read from unit 0.
        GLES20.glUniform1i(renderer.getTextureUniform(), 0);
        // // Bind the texture to this unit.
        // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
        // textureDetails.textureId);
        texture.bindTexture();
    }

    renderer.setModelViewProjectionMatrix(mvpMatrix);

    /** render the geometry of this feature */
    if (geometry != null)
        geometry.render();
}
Example 8
Source File: TextureRect.java From GLEXP-Team-onebillion with Apache License 2.0 | 5 votes |
// Draws a textured rectangle (l,t,r,b) using a bitmap plus a mask bitmap,
// uploading both textures and blending with premultiplied alpha.
public void draw(OBRenderer renderer, float l, float t, float r, float b, Bitmap bitmap, Bitmap mask) {
    // Pack interleaved position + UV data into the shared vertex array.
    fillOutRectVertexData(vertices,l,t,r,b,POSITION_COMPONENT_COUNT + UV_COMPONENT_COUNT);
    fillOutRectTextureData(vertices,uvLeft,uvTop,uvRight,uvBottom,POSITION_COMPONENT_COUNT + UV_COMPONENT_COUNT);
    // Lazily create the VertexArray on first use, otherwise just refill it.
    if (vertexArray == null)
        vertexArray = new VertexArray(vertices);
    else
        vertexArray.put(vertices);
    renderer.maskProgram.useProgram();
    bindMaskData((MaskShaderProgram) renderer.maskProgram);
    // Texture unit object 0: the main bitmap.
    glBindTexture(GL_TEXTURE_2D, renderer.textureObjectId(0));
    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    // GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glEnable(GLES20.GL_BLEND);
    texImage2D(GL_TEXTURE_2D,0,bitmap,0);
    // Texture object 1: the mask bitmap.
    // NOTE(review): blend func/enable are repeated here — harmless but redundant.
    glBindTexture(GL_TEXTURE_2D, renderer.textureObjectId(1));
    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    //GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glEnable(GLES20.GL_BLEND);
    texImage2D(GL_TEXTURE_2D,0,mask,0);
    glDrawArrays(GL_TRIANGLE_STRIP,0,4);
}
Example 9
Source File: BackgroundRenderer.java From justaline-android with Apache License 2.0 | 5 votes |
/**
 * Draws the AR background image. The image will be drawn such that virtual content rendered
 * with the matrices provided by {@link Camera#getViewMatrix(float[], int)} and
 * {@link Camera#getProjectionMatrix(float[], int, float, float)} will accurately follow
 * static physical objects. This must be called <b>before</b> drawing virtual content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {
    // We need to re-query the uv coordinates for the screen rect, as they may have
    // changed as well.
    frame.transformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);

    // No need to test or write depth, the screen quad has arbitrary depth, and is expected
    // to be drawn first.
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glDepthMask(false);

    // The camera image arrives as an external OES texture.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);

    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, mQuadVertices);

    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.glDepthMask(true);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    ShaderUtil.checkGLError(TAG, "Draw");
}
Example 10
Source File: MyGLRenderer.java From poly-sample-android with Apache License 2.0 | 5 votes |
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    // Dark teal background.
    GLES20.glClearColor(0.0f, 0.15f, 0.15f, 1.0f);
    // Depth testing for correct hidden-surface removal.
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);
    // Seed the frame timer used for animation deltas.
    lastFrameTime = System.currentTimeMillis();
    // Shader must be created here, on the GL thread, after the context exists.
    myShader = new MyShader();
}
Example 11
Source File: PCRenderer.java From tango with MIT License | 5 votes |
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    // White background with depth testing enabled.
    GLES20.glClearColor(1f, 1f, 1f, 1.0f);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);
    // GL-thread construction of scene objects (point cloud, grid, frustum axes).
    mPointCloud = new PointCloud(mMaxDepthPoints);
    mGrid = new Grid();
    mCameraFrustumAndAxis = new CameraFrustumAndAxis();
    // Initial camera: looking at the origin from (5,5,5) with +Y up.
    Matrix.setIdentityM(mViewMatrix, 0);
    Matrix.setLookAtM(mViewMatrix, 0, 5f, 5f, 5f, 0f, 0f, 0f, 0f, 1f, 0f);
    mCameraFrustumAndAxis.setModelMatrix(getModelMatCalculator()
            .getModelMatrix());
}
Example 12
Source File: GLState.java From tilt-game-android with MIT License | 5 votes |
/**
 * Enables GL blending, caching the state so the GL call is only issued
 * on an actual transition from disabled to enabled.
 *
 * @return the previous state.
 */
public boolean enableBlend() {
    final boolean wasEnabled = this.mBlendEnabled;
    if (!wasEnabled) {
        this.mBlendEnabled = true;
        GLES20.glEnable(GLES20.GL_BLEND);
    }
    return wasEnabled;
}
Example 13
Source File: TextureMovieEncoder.java From AndroidPlayground with MIT License | 5 votes |
/**
 * Draws a box, with position offset.
 * <p>
 * Clears a 100x100 magenta rectangle whose x position advances 4 pixels
 * per {@code posn} step, wrapping before the right edge of the surface.
 */
private void drawBox(int posn) {
    final int surfaceWidth = mInputWindowSurface.getWidth();
    final int boxX = (posn * 4) % (surfaceWidth - 50);

    // Magenta clear color, restricted to the box via the scissor rect.
    GLES20.glClearColor(1.0f, 0.0f, 1.0f, 1.0f);
    GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
    GLES20.glScissor(boxX, 0, 100, 100);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
}
Example 14
Source File: ImageHotspot.java From Pano360 with MIT License | 5 votes |
@Override
public void onDrawFrame(int textureId) {
    // Standard (non-premultiplied) alpha blending for the hotspot image.
    GLES20.glEnable(GLES20.GL_BLEND);
    GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    glPassThroughProgram.use();
    // Bind the hotspot bitmap texture to unit 1 for the pass-through shader.
    TextureUtils.bindTexture2D(bitmapTexture.getImageTextureId(), GLES20.GL_TEXTURE1,glPassThroughProgram.getTextureSamplerHandle(),1);
    // Upload the quad's texture coordinates and vertices.
    imagePlane.uploadTexCoordinateBuffer(glPassThroughProgram.getTextureCoordinateHandle());
    imagePlane.uploadVerticesBuffer(glPassThroughProgram.getPositionHandle());
    // Recompute and push the MVP matrix, then draw the quad.
    updateMatrix();
    GLES20.glUniformMatrix4fv(glPassThroughProgram.getMVPMatrixHandle(), 1, false, mMVPMatrix, 0);
    imagePlane.draw();
    // Leave blending disabled for subsequent passes.
    GLES20.glDisable(GLES20.GL_BLEND);
}
Example 15
Source File: GPUProcessor.java From HoloKilo with GNU General Public License v3.0 | 4 votes |
// Draw to screen in SBS (side-by-side) for cardboard and so on:
// renders the selected source texture once per eye, optionally overlays
// stereo cubes, then draws a 1px separation line down the middle.
public void drawStereo() {
    GLES20.glClearColor(0, 0, 0, 0);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glUseProgram(programFinal);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

    // Choose which texture to present, by debug/config priority.
    if (Config.SHOW_DEBUG) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uploadTexture.textureId.get(0));
    } else if (Config.SHOW_CAMERA) {
        // Raw camera preview comes in as an external OES texture.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture);
    } else if (Config.SHOW_RGBA_RESULT && doneFramebufferPool.size() > 0) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, doneFramebufferPool.get(0).textureId.get(0));
    } else if (previousFb != null) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, previousFb.textureId.get(0));
    }

    // Shader uniforms: exposure, sampler unit, and normalized hit point.
    GLES20.glUniform1f(exposureHandle, exposureValue.get());
    GLES20.glUniform1i(finalHandle, 0);
    GLES20.glUniform2f(pointHandle,
            (float) BlobFinder.getHitPoint()[0] / (float) subWidth,
            (float) BlobFinder.getHitPoint()[1] / (float) subHeight);

    // Texture coordinates and element indices are shared by both eyes.
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, textureVertexPointer[0]);
    GLES20.glEnableVertexAttribArray(textureCoordHandle[4]);
    GLES20.glVertexAttribPointer(textureCoordHandle[4], COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, 0);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, drawListPointer[0]);

    // Per-eye viewport geometry, scaled by the horizontal/vertical FOV factors.
    float w = (float)screenWidth / 2f * hfov;
    float h = (float)screenHeight * vfov;
    float x1 = ((float)screenWidth / 2f - w) / 2f;   // left-eye x offset
    float x2 = x1 + (float)screenWidth / 2f;         // right-eye x offset
    float y = ((float)screenHeight - h) / 2f;

    // Draw the same quad twice: i == 0 left eye, i == 1 right eye.
    for (int i = 0; i < 2; i++) {
        GLES20.glViewport(i == 0 ? (int)x1 : (int)x2, (int)y, (int)w, (int)h);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexPointer[0]);
        GLES20.glEnableVertexAttribArray(positionHandle[4]);
        GLES20.glVertexAttribPointer(positionHandle[4], COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, 0);
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, 0);
    }

    // Restore full-screen viewport and unbind the program.
    GLES20.glViewport(0, 0, screenWidth, screenHeight);
    GLES20.glUseProgram(0);

    if (Config.DRAW_CUBES) {
        CubeRenderer.setStates(true);
        try {
            // Matrix state is shared with another thread; guard with the semaphore.
            semMatrix.acquire();
            GLES20.glViewport(0, 0, screenWidth / 2, screenHeight);
            cubeRenderer.drawLeft();
            GLES20.glViewport(screenWidth / 2, 0, screenWidth / 2, screenHeight);
            cubeRenderer.drawRight();
            semMatrix.release();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        CubeRenderer.setStates(false);
    }

    // Draw separation line: a 1px white scissored clear between the eyes.
    GLES20.glViewport(0, 0, screenWidth, screenHeight);
    GLES20.glScissor(screenWidth / 2, 0, 1, screenHeight);
    GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
    GLES20.glClearColor(1f, 1f, 1f, 0.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
}
Example 16
Source File: InstantCameraView.java From TelePlus-Android with GNU General Public License v2.0 | 4 votes |
// Renders one camera frame into the encoder surface, handling the cross-fade
// between the old and new camera textures when the user switches cameras,
// plus timestamp bookkeeping (skips the first ~200ms of frames).
private void handleVideoFrameAvailable(long timestampNanos, Integer cameraId) {
    try {
        drainEncoder(false);
    } catch (Exception e) {
        FileLog.e(e);
    }
    long dt, alphaDt;
    // A camera switch invalidates the previous timestamp baseline.
    if (!lastCameraId.equals(cameraId)) {
        lastTimestamp = -1;
        lastCameraId = cameraId;
    }
    if (lastTimestamp == -1) {
        lastTimestamp = timestampNanos;
        if (currentTimestamp != 0) {
            // Resuming mid-recording: estimate dt from wall-clock time
            // (milliseconds converted to nanoseconds); no alpha advance.
            dt = (System.currentTimeMillis() - lastCommitedFrameTime) * 1000000;
            alphaDt = 0;
        } else {
            alphaDt = dt = 0;
        }
    } else {
        alphaDt = dt = (timestampNanos - lastTimestamp);
        lastTimestamp = timestampNanos;
    }
    lastCommitedFrameTime = System.currentTimeMillis();
    // Drop frames until 200ms (200000000ns) have accumulated.
    if (!skippedFirst) {
        skippedTime += dt;
        if (skippedTime < 200000000) {
            return;
        }
        skippedFirst = true;
    }
    currentTimestamp += dt;
    if (videoFirst == -1) {
        videoFirst = timestampNanos / 1000;
        if (BuildVars.LOGS_ENABLED) {
            FileLog.d("first video frame was at " + videoFirst);
        }
    }
    videoLast = timestampNanos;

    // Set up the draw program and per-frame vertex/uniform state.
    GLES20.glUseProgram(drawProgram);
    GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer);
    GLES20.glEnableVertexAttribArray(positionHandle);
    GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
    GLES20.glEnableVertexAttribArray(textureHandle);
    GLES20.glUniform1f(scaleXHandle, scaleX);
    GLES20.glUniform1f(scaleYHandle, scaleY);
    GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    if (oldCameraTexture[0] != 0) {
        // Cross-fade in progress: draw the outgoing camera frame fully
        // opaque underneath, with blending enabled for the new frame.
        if (!blendEnabled) {
            GLES20.glEnable(GLES20.GL_BLEND);
            blendEnabled = true;
        }
        GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, moldSTMatrix, 0);
        GLES20.glUniform1f(alphaHandle, 1.0f);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oldCameraTexture[0]);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
    // Draw the current camera frame at the fade-in alpha.
    GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, mSTMatrix, 0);
    GLES20.glUniform1f(alphaHandle, cameraTextureAlpha);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    GLES20.glDisableVertexAttribArray(positionHandle);
    GLES20.glDisableVertexAttribArray(textureHandle);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    GLES20.glUseProgram(0);
    // Stamp the frame with the recording timestamp and hand it to the encoder.
    EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, currentTimestamp);
    EGL14.eglSwapBuffers(eglDisplay, eglSurface);
    if (oldCameraTexture[0] != 0 && cameraTextureAlpha < 1.0f) {
        // Advance the cross-fade; completes in ~200ms of frame time.
        cameraTextureAlpha += alphaDt / 200000000.0f;
        if (cameraTextureAlpha > 1) {
            // Fade finished: drop blending and delete the old texture.
            GLES20.glDisable(GLES20.GL_BLEND);
            blendEnabled = false;
            cameraTextureAlpha = 1;
            GLES20.glDeleteTextures(1, oldCameraTexture, 0);
            oldCameraTexture[0] = 0;
            if (!cameraReady) {
                cameraReady = true;
            }
        }
    } else if (!cameraReady) {
        cameraReady = true;
    }
}
Example 17
Source File: TextureViewGLActivity.java From grafika with Apache License 2.0 | 4 votes |
/**
 * Draws updates as fast as the system will allow.
 * <p>
 * In 4.4, with the synchronous buffer queue queue, the frame rate will be limited.
 * In previous (and future) releases, with the async queue, many of the frames we
 * render may be dropped.
 * <p>
 * The correct thing to do here is use Choreographer to schedule frame updates off
 * of vsync, but that's not nearly as much fun.
 */
private void doAnimation(WindowSurface eglSurface) {
    final int BLOCK_WIDTH = 80;
    final int BLOCK_SPEED = 2;
    float clearColor = 0.0f;
    int xpos = -BLOCK_WIDTH / 2;
    int xdir = BLOCK_SPEED;
    int width = eglSurface.getWidth();
    int height = eglSurface.getHeight();

    Log.d(TAG, "Animating " + width + "x" + height + " EGL surface");

    while (true) {
        // Check to see if the TextureView's SurfaceTexture is still valid.
        // (mSurfaceTexture is written by another thread, hence the lock.)
        synchronized (mLock) {
            SurfaceTexture surfaceTexture = mSurfaceTexture;
            if (surfaceTexture == null) {
                Log.d(TAG, "doAnimation exiting");
                return;
            }
        }

        // Still alive, render a frame: full-surface gray clear, then a red
        // block drawn via a scissored clear.
        GLES20.glClearColor(clearColor, clearColor, clearColor, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(xpos, height / 4, BLOCK_WIDTH, height / 2);
        GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);

        // Publish the frame. If we overrun the consumer, frames will be dropped,
        // so on a sufficiently fast device the animation will run at faster than
        // the display refresh rate.
        //
        // If the SurfaceTexture has been destroyed, this will throw an exception.
        eglSurface.swapBuffers();

        // Advance state: fade the background and bounce the block off the edges.
        clearColor += 0.015625f;
        if (clearColor > 1.0f) {
            clearColor = 0.0f;
        }
        xpos += xdir;
        if (xpos <= -BLOCK_WIDTH / 2 || xpos >= width - BLOCK_WIDTH / 2) {
            Log.d(TAG, "change direction");
            xdir = -xdir;
        }
    }
}
Example 18
Source File: BackgroundRenderer.java From poly-sample-android with Apache License 2.0 | 4 votes |
/**
 * Draws the AR background image. The image will be drawn such that virtual content rendered with
 * the matrices provided by {@link com.google.ar.core.Camera#getViewMatrix(float[], int)} and
 * {@link com.google.ar.core.Camera#getProjectionMatrix(float[], int, float, float)} will
 * accurately follow static physical objects. This must be called <b>before</b> drawing virtual
 * content.
 *
 * @param frame The last {@code Frame} returned by {@link Session#update()}.
 */
public void draw(Frame frame) {
    // If display rotation changed (also includes view size change), we need to re-query the uv
    // coordinates for the screen rect, as they may have changed as well.
    if (frame.hasDisplayGeometryChanged()) {
        frame.transformDisplayUvCoords(quadTexCoord, quadTexCoordTransformed);
    }

    // No need to test or write depth, the screen quad has arbitrary depth, and is expected
    // to be drawn first.
    GLES20.glDisable(GLES20.GL_DEPTH_TEST);
    GLES20.glDepthMask(false);

    // The camera image arrives as an external OES texture.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);

    GLES20.glUseProgram(quadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            quadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadVertices);

    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(
            quadTexCoordParam, TEXCOORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, quadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(quadPositionParam);
    GLES20.glEnableVertexAttribArray(quadTexCoordParam);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(quadPositionParam);
    GLES20.glDisableVertexAttribArray(quadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.glDepthMask(true);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    ShaderUtil.checkGLError(TAG, "Draw");
}
Example 19
Source File: StarWarsRenderer.java From StarWars.Android with MIT License | 4 votes |
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
    // Transparent black clear, then clear both buffers once up front.
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // Use culling to remove back faces; front faces are wound clockwise.
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    GLES20.glFrontFace(GLES20.GL_CW);

    // Enable depth testing
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // Position the eye in front of the origin.
    final float eyeX = 0.0f;
    final float eyeY = 0.0f;
    final float eyeZ = 0.0f;

    // We are looking toward the distance
    final float lookX = 0.0f;
    final float lookY = 0.0f;
    final float lookZ = 1.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    final float upX = 0.0f;
    final float upY = 1.0f;
    final float upZ = 0.0f;

    Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

    // Compile and link the tile shader program from raw resources.
    final String vertexShader = RawResourceReader.readTextFileFromRawResource(mGlSurfaceView.getContext(), R.raw.tiles_vert);
    final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mGlSurfaceView.getContext(), R.raw.tiles_frag);

    final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
    final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);

    programHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
            new String[]{"a_Position", "a_Normal", "a_TexCoordinate"});

    // Initialize the accumulated rotation matrix
    Matrix.setIdentityM(mAccumulatedRotation, 0);
}
Example 20
Source File: MultiSurfaceActivity.java From pause-resume-video-recording with Apache License 2.0 | 4 votes |
/**
 * Clears the surface, then draws some alpha-blended rectangles with GL.
 * <p>
 * Creates a temporary EGL context just for the duration of the call.
 */
private void drawRectSurface(Surface surface, int left, int top, int width, int height) {
    // Temporary EGL context + window surface, torn down before returning.
    EglCore eglCore = new EglCore();
    WindowSurface win = new WindowSurface(eglCore, surface, false);
    win.makeCurrent();
    GLES20.glClearColor(0, 0, 0, 0);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
    // Split the target rect into four strips along its shorter dimension and
    // clear each with a different premultiplied color.
    for (int i = 0; i < 4; i++) {
        int x, y, w, h;
        if (width < height) {
            // vertical
            w = width / 4;
            h = height;
            x = left + w * i;
            y = top;
        } else {
            // horizontal
            w = width;
            h = height / 4;
            x = left;
            y = top + h * i;
        }
        GLES20.glScissor(x, y, w, h);
        switch (i) {
            case 0:
                // 50% blue at 25% alpha, pre-multiplied
                GLES20.glClearColor(0.0f, 0.0f, 0.125f, 0.25f);
                break;
            case 1:
                // 100% blue at 25% alpha, pre-multiplied
                GLES20.glClearColor(0.0f, 0.0f, 0.25f, 0.25f);
                break;
            case 2:
                // 200% blue at 25% alpha, pre-multiplied (should get clipped)
                GLES20.glClearColor(0.0f, 0.0f, 0.5f, 0.25f);
                break;
            case 3:
                // 100% white at 25% alpha, pre-multiplied
                GLES20.glClearColor(0.25f, 0.25f, 0.25f, 0.25f);
                break;
        }
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }
    GLES20.glDisable(GLES20.GL_SCISSOR_TEST);

    // Publish, then release the surface and context.
    win.swapBuffers();
    win.release();
    eglCore.release();
}