Java Code Examples for android.opengl.GLES20#glFrontFace()
The following examples show how to use android.opengl.GLES20#glFrontFace().
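glFrontFace() tells OpenGL ES which vertex winding order counts as a polygon's front face (GL_CCW, counter-clockwise, is the default), which in turn determines what glCullFace() discards once face culling is enabled. The sketch below is a minimal, hypothetical renderer for clockwise-wound geometry; the class name and structure are illustrative and not taken from any of the projects listed here.

// Minimal sketch (assumed names): treat clockwise triangles as front-facing
// and discard back faces.
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class WindingSetupRenderer implements GLSurfaceView.Renderer {
    @Override
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        // GL_CCW is the OpenGL ES default; switch to GL_CW when the mesh data
        // (or a mirrored projection) winds triangles clockwise.
        GLES20.glFrontFace(GLES20.GL_CW);
        GLES20.glCullFace(GLES20.GL_BACK);     // drop faces pointing away from the camera
        GLES20.glEnable(GLES20.GL_CULL_FACE);  // culling stays off until explicitly enabled
    }

    @Override
    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // ... draw clockwise-wound geometry here ...
    }
}

As several examples below show, a common variation is to choose GL_CW or GL_CCW at draw time, for instance when a mirrored (front-camera) video background flips the winding of otherwise counter-clockwise models.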
Example 1
Source File: PanoRender.java From Pano360 with MIT License
@Override
public void onDrawFrame(GL10 glUnused) {
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glFrontFace(GLES20.GL_CW);
    GLES20.glCullFace(GLES20.GL_BACK);
    GLES20.glEnable(GLES20.GL_CULL_FACE);

    if (!imageMode) {
        panoMediaPlayerWrapper.doTextureUpdate(((OESFilter) firstPassFilter).getSTMatrix());
    }
    filterGroup.drawToFBO(0, fbo);
    if (fbo != null) {
        screenDrawer.onDrawFrame(fbo.getFrameBufferTextureId());
    }

    if (saveImg) {
        BitmapUtils.sendImage(surfaceWidth, surfaceHeight, statusHelper.getContext());
        saveImg = false;
    }

    GLES20.glDisable(GLES20.GL_CULL_FACE);
    //GLES20.glFinish();
}
Example 2
Source File: CubeRenderer.java From HoloKilo with GNU General Public License v3.0
public static void setStates(boolean state) {
    if (state) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glEnable(GLES20.GL_DEPTH_TEST);
        GLES20.glDepthFunc(GLES20.GL_LESS);
        GLES20.glFrontFace(GLES20.GL_CCW);
        GLES20.glEnable(GLES20.GL_CULL_FACE);
        GLES20.glCullFace(GLES20.GL_FRONT);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT);
    } else {
        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        GLES20.glDisable(GLES20.GL_CULL_FACE);
        GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
        GLES20.glUseProgram(0);
    }
}
Example 3
Source File: ParticleSystemRenderer.java From StarWars.Android with MIT License
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

    // Use culling to remove back faces.
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    GLES20.glFrontFace(GLES20.GL_CW);

    // Enable depth testing
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // Position the eye in front of the origin.
    final float eyeX = 0.0f;
    final float eyeY = 0.0f;
    final float eyeZ = 0.0f;

    // We are looking toward the distance
    final float lookX = 0.0f;
    final float lookY = 0.0f;
    final float lookZ = 1.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    final float upX = 0.0f;
    final float upY = 1.0f;
    final float upZ = 0.0f;

    Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

    final String vertexShader = RawResourceReader.readTextFileFromRawResource(App.getAppContext(), R.raw.star_vert);
    final String fragmentShader = RawResourceReader.readTextFileFromRawResource(App.getAppContext(), R.raw.star_frag);

    final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
    final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);

    programHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
            new String[]{"a_Position", "a_TexCoordinate", "a_TileXY"});
}
Example 4
Source File: CameraViewModel.java From VIA-AI with MIT License
public void draw(int index, boolean fourInOne) {
    if (!mIsRendering) return;

    // Check is opengl init or not.
    checkAndInitGL();

    // Check is data updated or not.
    checkDataUpdate();

    GLES20.glUseProgram(this.mProgram);
    GLUtility.checkGlError("glUseProgram");

    // Set the face rotation
    GLES20.glFrontFace(GLES20.GL_CW);

    // get handle to vertex shader's vPosition member
    int mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mPositionVBO[0]);
    GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, 0);

    // Get handle to mTextureCoordinate coordinates location
    int mTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "aTexCoord");
    GLES20.glEnableVertexAttribArray(mTexCoordHandle);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mTextureCoordinateVBO[0]);
    GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, 0);

    // get handle to shape's transformation matrix
    int mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMvpMatrix, 0);

    // Get handle to mTextureObject locations
    int mTexSamplerHandle = GLES20.glGetUniformLocation(mProgram, "sTexture");
    GLES20.glUniform1i(mTexSamplerHandle, 0);

    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, mIndexVBO[0]);
    GLES20.glDrawElements(GLES20.GL_TRIANGLE_STRIP, mIndex.length, GLES20.GL_UNSIGNED_SHORT, 0);
    GLUtility.checkGlError("glDrawElements");

    // Disable vertex array
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GLES20.glDisableVertexAttribArray(mTexCoordHandle);
}
Example 5
Source File: SingleEGLImageModel.java From VIA-AI with MIT License
public void draw(int index, boolean fourInOne, boolean b1x4, boolean luminance) {
    Matrix.orthoM(projectMatrix, 0, -1, 1, -1, 1, -1, 1);
    Matrix.setIdentityM(viewMatrix, 0);
    Matrix.multiplyMM(mvpMatrix, 0, projectMatrix, 0, viewMatrix, 0);

    if (luminance) {
        GLES20.glUseProgram(this.mProgram_Luminance);
        GLUtility.checkGlError("glUseProgram");
    } else {
        GLES20.glUseProgram(this.mProgram);
        GLUtility.checkGlError("glUseProgram");
    }

    // Set the face rotation
    GLES20.glFrontFace(GLES20.GL_CW);
    GLUtility.checkGlError("glFrontFace");

    // get handle to vertex shader's vPosition member
    int mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    GLUtility.checkGlError("glGetAttribLocation");

    // Enable a handle to the triangle vertices
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLUtility.checkGlError("glEnableVertexAttribArray");

    // Bind VBO Position
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mPositionVBO[0]);
    GLUtility.checkGlError("glBindBuffer");
    GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, 0);
    GLUtility.checkGlError("glVertexAttribPointer");

    // Get handle to mTextureCoordinate coordinates location
    int mTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "aTexCoord");
    GLUtility.checkGlError("glGetAttribLocation");

    // Enable generic vertex attribute array
    GLES20.glEnableVertexAttribArray(mTexCoordHandle);
    GLUtility.checkGlError("glEnableVertexAttribArray");

    // Bind VBO Texture Coordinate
    if (!fourInOne) {
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mTextureCoordinateVBO[0]);
        GLUtility.checkGlError("glBindBuffer");
    } else {
        if (b1x4) {
            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mTextureCoordinateMergeVBO1x4[index]);
        } else {
            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mTextureCoordinateMergeVBO2x2[index]);
        }
        GLUtility.checkGlError("glBindBuffer");
    }
    GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, 0);
    GLUtility.checkGlError("glVertexAttribPointer");

    // get handle to shape's transformation matrix
    int mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    GLUtility.checkGlError("glGetUniformLocation");

    // Apply the projection and view transformation
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
    GLUtility.checkGlError("glUniformMatrix4fv");

    // Get handle to mTextureObject locations
    int mTexSamplerHandle = GLES20.glGetUniformLocation(mProgram, "sTexture");
    GLUtility.checkGlError("glGetUniformLocation");

    // Set the sampler mTextureCoordinate unit to 0, where we have saved the mTextureCoordinate.
    GLES20.glUniform1i(mTexSamplerHandle, 0);
    GLUtility.checkGlError("glUniform1i");

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, mPosition.length / 3);
    GLUtility.checkGlError("glDrawArrays");

    // Disable vertex array
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GLUtility.checkGlError("glDisableVertexAttribArray");
    GLES20.glDisableVertexAttribArray(mTexCoordHandle);
    GLUtility.checkGlError("glDisableVertexAttribArray");
}
Example 6
Source File: CustomImageModel.java From VIA-AI with MIT License
public void draw(int index, boolean fourInOne, boolean luminance) {
    if (!mIsRendering) return;

    // Check is opengl init or not.
    checkAndInitGL();

    // Check is data updated or not.
    checkDataUpdate();

    Matrix.orthoM(mProjectMatrix, 0, -1, 1, -1, 1, -1, 1);
    Matrix.setIdentityM(mViewMatrix, 0);
    Matrix.multiplyMM(mMvpMatrix, 0, mProjectMatrix, 0, mViewMatrix, 0);

    if (luminance) {
        GLES20.glUseProgram(this.mProgram_Luminance);
        GLUtility.checkGlError("glUseProgram");
    } else {
        GLES20.glUseProgram(this.mProgram);
        GLUtility.checkGlError("glUseProgram");
    }

    // Set the face rotation
    GLES20.glFrontFace(GLES20.GL_CW);
    GLUtility.checkGlError("glFrontFace");

    // get handle to vertex shader's vPosition member
    int mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    GLUtility.checkGlError("glGetAttribLocation");

    // Enable a handle to the triangle vertices
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLUtility.checkGlError("glEnableVertexAttribArray");

    // Bind VBO Position
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mPositionVBO[0]);
    GLUtility.checkGlError("glBindBuffer");
    GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, 0);
    GLUtility.checkGlError("glVertexAttribPointer");

    // Get handle to mTextureCoordinate coordinates location
    int mTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "aTexCoord");
    GLUtility.checkGlError("glGetAttribLocation");

    // Enable generic vertex attribute array
    GLES20.glEnableVertexAttribArray(mTexCoordHandle);
    GLUtility.checkGlError("glEnableVertexAttribArray");

    // Bind VBO Texture Coordinate
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mTextureCoordinateVBO[0]);
    GLUtility.checkGlError("glBindBuffer");
    GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, 0);
    GLUtility.checkGlError("glVertexAttribPointer");

    // get handle to shape's transformation matrix
    int mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    GLUtility.checkGlError("glGetUniformLocation");

    // Apply the projection and view transformation
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMvpMatrix, 0);
    GLUtility.checkGlError("glUniformMatrix4fv");

    // Get handle to mTextureObject locations
    int mTexSamplerHandle = GLES20.glGetUniformLocation(mProgram, "sTexture");
    GLUtility.checkGlError("glGetUniformLocation");

    // Set the sampler mTextureCoordinate unit to 0, where we have saved the mTextureCoordinate.
    GLES20.glUniform1i(mTexSamplerHandle, 0);
    GLUtility.checkGlError("glUniform1i");

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, mPosition.length / 3);
    GLUtility.checkGlError("glDrawArrays");

    // Disable vertex array
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GLUtility.checkGlError("glDisableVertexAttribArray");
    GLES20.glDisableVertexAttribArray(mTexCoordHandle);
    GLUtility.checkGlError("glDisableVertexAttribArray");
}
Example 7
Source File: AppRenderer.java From VuforiaLibGDX with MIT License
public TrackableResult[] render() {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // Get our current state
    State state = TrackerManager.getInstance().getStateUpdater().updateState();
    mRenderer.begin(state);

    // We must detect if background reflection is active and adjust the culling direction.
    // If the reflection is active, this means the post matrix has been reflected as well,
    // therefore standard counter clockwise face culling will result in "inside out" models.
    if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON)
        GLES20.glFrontFace(GLES20.GL_CW);  // Front camera
    else
        GLES20.glFrontFace(GLES20.GL_CCW); // Back camera

    // We get a list of views which depend on the mode we are working on; for mono we have
    // only one view, in stereo we have three: left, right and postprocess
    ViewList viewList = mRenderingPrimitives.getRenderingViews();

    TrackableResult[] results = null;

    // Cycle through the view list
    for (int v = 0; v < viewList.getNumViews(); v++) {
        // Get the view id
        int viewID = viewList.getView(v);

        // Get the viewport for that specific view
        Vec4I viewport = mRenderingPrimitives.getViewport(viewID);

        // Set viewport for current view
        GLES20.glViewport(viewport.getData()[0], viewport.getData()[1], viewport.getData()[2], viewport.getData()[3]);

        // Set scissor
        GLES20.glScissor(viewport.getData()[0], viewport.getData()[1], viewport.getData()[2], viewport.getData()[3]);

        // Get projection matrix for the current view.
        Matrix34F projMatrix = mRenderingPrimitives.getProjectionMatrix(viewID, state.getCameraCalibration());

        // Create GL matrix setting up the near and far planes
        float rawProjectionMatrixGL[] = Tool.convertPerspectiveProjection2GLMatrix(projMatrix, mNearPlane, mFarPlane).getData();

        // Apply the appropriate eye adjustment to the raw projection matrix, and assign to the global variable
        float eyeAdjustmentGL[] = Tool.convert2GLMatrix(mRenderingPrimitives.getEyeDisplayAdjustmentMatrix(viewID)).getData();

        float projectionMatrix[] = new float[16];
        // Apply the adjustment to the projection matrix
        Matrix.multiplyMM(projectionMatrix, 0, rawProjectionMatrixGL, 0, eyeAdjustmentGL, 0);

        currentView = viewID;

        // Call renderFrame from the app renderer class which implements SampleAppRendererControl.
        // This will be called for MONO, LEFT and RIGHT views; POSTPROCESS will not render the frame.
        if (currentView != VIEW.VIEW_POSTPROCESS) {
            results = mRenderingInterface.renderFrame(state, projectionMatrix);
        }
    }

    mRenderer.end();
    return results;
}
Example 8
Source File: StarWarsRenderer.java From StarWars.Android with MIT License
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // Use culling to remove back faces.
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    GLES20.glFrontFace(GLES20.GL_CW);

    // Enable depth testing
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // Position the eye in front of the origin.
    final float eyeX = 0.0f;
    final float eyeY = 0.0f;
    final float eyeZ = 0.0f;

    // We are looking toward the distance
    final float lookX = 0.0f;
    final float lookY = 0.0f;
    final float lookZ = 1.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    final float upX = 0.0f;
    final float upY = 1.0f;
    final float upZ = 0.0f;

    Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

    final String vertexShader = RawResourceReader.readTextFileFromRawResource(mGlSurfaceView.getContext(), R.raw.tiles_vert);
    final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mGlSurfaceView.getContext(), R.raw.tiles_frag);

    final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
    final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);

    programHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
            new String[]{"a_Position", "a_Normal", "a_TexCoordinate"});

    // Initialize the accumulated rotation matrix
    Matrix.setIdentityM(mAccumulatedRotation, 0);
}
Example 9
Source File: ImageTargetRenderer.java From cordova-plugin-vuforia with MIT License
private void renderFrame() {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    State state = mRenderer.begin();
    mRenderer.drawVideoBackground();

    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // handle face culling, we need to detect if we are using reflection
    // to determine the direction of the culling
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    GLES20.glCullFace(GLES20.GL_BACK);
    if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON)
        GLES20.glFrontFace(GLES20.GL_CW);  // Front camera
    else
        GLES20.glFrontFace(GLES20.GL_CCW); // Back camera

    // did we find any trackables this frame?
    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
        TrackableResult result = state.getTrackableResult(tIdx);
        Trackable trackable = result.getTrackable();
        String obj_name = trackable.getName();

        Log.d(LOGTAG, "MRAY :: Found: " + obj_name);

        /**
         * Our targets array has been flattened to a string so will equal something like: ["one", "two"]
         * So, to stop weak matches such as 'two' within ["onetwothree", "two"] we wrap the term in
         * speech marks such as '"two"'
         **/
        Boolean looking_for = mTargets.toLowerCase().contains("\"" + obj_name.toLowerCase() + "\"");

        if (looking_for) {
            mActivity.imageFound(obj_name);
        }
    }

    GLES20.glDisable(GLES20.GL_DEPTH_TEST);

    mRenderer.end();
}
Example 10
Source File: AndroidGL.java From trekarta with GNU General Public License v3.0
@Override
public void frontFace(int mode) {
    GLES20.glFrontFace(mode);
}