Java Code Examples for android.opengl.Matrix#multiplyMV()
The following examples show how to use android.opengl.Matrix#multiplyMV(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
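For orientation, Matrix.multiplyMV(resultVec, resultVecOffset, lhsMat, lhsMatOffset, rhsVec, rhsVecOffset) treats lhsMat as a column-major 4x4 matrix and computes resultVec = lhsMat * rhsVec. The minimal sketch below (variable names are illustrative, not taken from the projects that follow) shows the common pattern of transforming a point (w = 1) versus a direction (w = 0):

/** Minimal sketch: transform a point (w = 1) and a direction (w = 0) by a model matrix. */
static void multiplyMVSketch() {
    float[] model = new float[16];
    Matrix.setIdentityM(model, 0);
    Matrix.translateM(model, 0, 1f, 2f, 3f);

    // A point carries w = 1, so the translation column applies: result = model * point.
    float[] point = {0f, 0f, 0f, 1f};
    float[] pointOut = new float[4];
    Matrix.multiplyMV(pointOut, 0, model, 0, point, 0); // pointOut == {1, 2, 3, 1}

    // A direction carries w = 0, so it is rotated/scaled but never translated.
    float[] dir = {0f, 0f, -1f, 0f};
    float[] dirOut = new float[4];
    Matrix.multiplyMV(dirOut, 0, model, 0, dir, 0);     // dirOut == {0, 0, -1, 0}
}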
Example 1
Source File: MainActivity.java From Cardboard with Apache License 2.0
/**
 * Check if user is looking at object by calculating where the object is in eye-space.
 *
 * @return true if the user is looking at the object.
 */
private boolean isLookingAtObject() {
    float[] initVec = {0, 0, 0, 1.0f};
    float[] objPositionVec = new float[4];

    // Convert object space to camera space. Use the headView from onNewFrame.
    Matrix.multiplyMM(mModelView, 0, mHeadView, 0, mModelCube, 0);
    Matrix.multiplyMV(objPositionVec, 0, mModelView, 0, initVec, 0);

    float pitch = (float) Math.atan2(objPositionVec[1], -objPositionVec[2]);
    float yaw = (float) Math.atan2(objPositionVec[0], -objPositionVec[2]);

    Log.i(TAG, "Object position: X: " + objPositionVec[0] + " Y: " + objPositionVec[1]
            + " Z: " + objPositionVec[2]);
    Log.i(TAG, "Object Pitch: " + pitch + " Yaw: " + yaw);

    return (Math.abs(pitch) < PITCH_LIMIT) && (Math.abs(yaw) < YAW_LIMIT);
}
Example 2
Source File: TreasureHuntActivity.java From PanoramaGL with Apache License 2.0
/**
 * Draws a frame for an eye.
 *
 * @param eye The eye to render. Includes all required transformations.
 */
@Override
public void onDrawEye(Eye eye) {
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    checkGLError("colorParam");

    // Apply the eye transformation to the camera.
    Matrix.multiplyMM(view, 0, eye.getEyeView(), 0, camera, 0);

    // Set the position of the light
    Matrix.multiplyMV(lightPosInEyeSpace, 0, view, 0, LIGHT_POS_IN_WORLD_SPACE, 0);

    // Build the ModelView and ModelViewProjection matrices
    // for calculating cube position and light.
    float[] perspective = eye.getPerspective(Z_NEAR, Z_FAR);
    Matrix.multiplyMM(modelView, 0, view, 0, modelCube, 0);
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
    drawCube();

    // Set modelView for the floor, so we draw floor in the correct location
    Matrix.multiplyMM(modelView, 0, view, 0, modelFloor, 0);
    Matrix.multiplyMM(modelViewProjection, 0, perspective, 0, modelView, 0);
    drawFloor();
}
Example 3
Source File: Vector2.java From Tanks with MIT License
public Vector3 toVector3(Plane plane) {
    Vector3 mtv3 = new Vector3(getX(), getY(), 0);

    Vector3 planeX = plane.xAxis();
    Vector3 planeY = plane.yAxis();
    Vector3 planeZ = plane.zAxis();

    // Column-major basis matrix whose columns are the plane's axes.
    float[] matrix = new float[16];
    matrix[0] = planeX.getX();
    matrix[1] = planeX.getY();
    matrix[2] = planeX.getZ();
    matrix[4] = planeY.getX();
    matrix[5] = planeY.getY();
    matrix[6] = planeY.getZ();
    matrix[8] = planeZ.getX();
    matrix[9] = planeZ.getY();
    matrix[10] = planeZ.getZ();
    matrix[15] = 1.0f;

    Matrix.multiplyMV(mtv3.getRaw(), 0, matrix, 0, mtv3.getRaw(), 0);
    mtv3.normalize();
    return mtv3;
}
Example 4
Source File: AROverlayView.java From ar-location-based-android with MIT License
@Override
protected void onDraw(Canvas canvas) {
    super.onDraw(canvas);

    if (currentLocation == null) {
        return;
    }

    final int radius = 30;
    Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
    paint.setStyle(Paint.Style.FILL);
    paint.setColor(Color.WHITE);
    paint.setTypeface(Typeface.create(Typeface.DEFAULT, Typeface.NORMAL));
    paint.setTextSize(60);

    for (int i = 0; i < arPoints.size(); i++) {
        float[] currentLocationInECEF = LocationHelper.WSG84toECEF(currentLocation);
        float[] pointInECEF = LocationHelper.WSG84toECEF(arPoints.get(i).getLocation());
        float[] pointInENU = LocationHelper.ECEFtoENU(currentLocation, currentLocationInECEF, pointInECEF);

        float[] cameraCoordinateVector = new float[4];
        Matrix.multiplyMV(cameraCoordinateVector, 0, rotatedProjectionMatrix, 0, pointInENU, 0);

        // cameraCoordinateVector[2] is z; it must be less than 0 for the point to be in front
        // of the camera. If z > 0, the point would be drawn on the opposite side.
        if (cameraCoordinateVector[2] < 0) {
            float x = (0.5f + cameraCoordinateVector[0] / cameraCoordinateVector[3]) * canvas.getWidth();
            float y = (0.5f - cameraCoordinateVector[1] / cameraCoordinateVector[3]) * canvas.getHeight();

            canvas.drawCircle(x, y, radius, paint);
            canvas.drawText(arPoints.get(i).getName(),
                    x - (30 * arPoints.get(i).getName().length() / 2), y - 80, paint);
        }
    }
}
Example 5
Source File: CameraMatrix.java From MegviiFacepp-Android-SDK with Apache License 2.0
/**
 * Image rotation (transforms the texture coordinates).
 */
private float[] transformTextureCoordinates(float[] coords, float[] matrix) {
    float[] result = new float[coords.length];
    float[] vt = new float[4];

    for (int i = 0; i < coords.length; i += 2) {
        float[] v = {coords[i], coords[i + 1], 0, 1};
        Matrix.multiplyMV(vt, 0, matrix, 0, v, 0);
        result[i] = vt[0];            // mirror along the x axis
        // result[i + 1] = vt[1];     // mirror along the y axis (disabled)
        result[i + 1] = coords[i + 1];
    }

    // Sample values observed while debugging:
    // [0.0, 1.0, 1.0, 1.0]                         v
    // [0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0]     coords
    // [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0]     result
    return result;
}
Example 6
Source File: Vector.java From libcommon with Apache License 2.0
/**
 * Rotate the vector (not thread safe).
 * x axis: (1,0,0), y axis: (0,1,0), z axis: (0,0,1)
 * @param angle rotation angle in degrees
 * @param axisX
 * @param axisY
 * @param axisZ
 * @return this vector, rotated in place
 */
public Vector rotate(final float angle, final float axisX, final float axisY, final float axisZ) {
    inVec[0] = x;
    inVec[1] = y;
    inVec[2] = z;
    inVec[3] = 1;
    Matrix.setIdentityM(matrix, 0);
    Matrix.rotateM(matrix, 0, angle, axisX, axisY, axisZ);
    Matrix.multiplyMV(outVec, 0, matrix, 0, inVec, 0);
    x = outVec[0];
    y = outVec[1];
    z = outVec[2];
    return this;
}
Example 7
Source File: GeometryLoader.java From react-native-3d-model-view with MIT License
private void readNormals(XmlNode meshData, String normalsId) {
    XmlNode normalsData = meshData.getChildWithAttribute("source", "id", normalsId).getChild("float_array");
    int count = Integer.parseInt(normalsData.getAttribute("count"));
    String[] normData = normalsData.getData().trim().split("\\s+");
    for (int i = 0; i < count / 3; i++) {
        float x = Float.parseFloat(normData[i * 3]);
        float y = Float.parseFloat(normData[i * 3 + 1]);
        float z = Float.parseFloat(normData[i * 3 + 2]);
        Vector4f norm = new Vector4f(x, y, z, 0f);
        float[] normV = new float[4];
        Matrix.multiplyMV(normV, 0, CORRECTION, 0, norm.toArray(), 0);
        norm = new Vector4f(normV);
        normals.add(new Vector3f(norm.x, norm.y, norm.z));
    }
}
Example 8
Source File: GeometryLoader.java From react-native-3d-model-view with MIT License
private void readPositions(XmlNode meshData, String geometryId) {
    String positionsId = meshData.getChild("vertices").getChild("input").getAttribute("source").substring(1);
    XmlNode positionsData = meshData.getChildWithAttribute("source", "id", positionsId).getChild("float_array");
    int count = Integer.parseInt(positionsData.getAttribute("count"));
    String[] posData = positionsData.getData().trim().split("\\s+");
    for (int i = 0; i < count / 3; i++) {
        float x = Float.parseFloat(posData[i * 3]);
        float y = Float.parseFloat(posData[i * 3 + 1]);
        float z = Float.parseFloat(posData[i * 3 + 2]);
        Vector4f position = new Vector4f(x, y, z, 1);
        float[] positionV = new float[4];
        Matrix.multiplyMV(positionV, 0, CORRECTION, 0, position.toArray(), 0);
        position = new Vector4f(positionV);
        VertexSkinData weightsData = skinningDataMap != null && skinningDataMap.containsKey(geometryId)
                ? skinningDataMap.get(geometryId).verticesSkinData.get(vertices.size()) : null;
        if (weightsData == null && skeletonData != null) {
            JointData jointData = getJointData(skeletonData.headJoint, geometryId);
            if (jointData != null) {
                weightsData = new VertexSkinData();
                weightsData.addJointEffect(jointData.index, 1);
                weightsData.limitJointNumber(3);
            }
        }
        vertices.add(new Vertex(vertices.size(), new Vector3f(position.x, position.y, position.z), weightsData));
    }
    Log.i("GeometryLoader", "Vertex count: " + vertices.size());
}
Example 9
Source File: Projector.java From codeexamples-android with Eclipse Public License 1.0
public void project(float[] obj, int objOffset, float[] win, int winOffset) {
    if (!mMVPComputed) {
        Matrix.multiplyMM(mMVP, 0, mGrabber.mProjection, 0, mGrabber.mModelView, 0);
        mMVPComputed = true;
    }

    Matrix.multiplyMV(mV, 0, mMVP, 0, obj, objOffset);

    // Perspective divide, then map from normalized device coordinates to window coordinates.
    float rw = 1.0f / mV[3];
    win[winOffset] = mX + mViewWidth * (mV[0] * rw + 1.0f) * 0.5f;
    win[winOffset + 1] = mY + mViewHeight * (mV[1] * rw + 1.0f) * 0.5f;
    win[winOffset + 2] = (mV[2] * rw + 1.0f) * 0.5f;
}
Example 10
Source File: CameraSurfaceView.java From Paddle-Lite-Demo with Apache License 2.0
private float[] transformTextureCoordinates(float[] coords, float[] matrix) {
    float[] result = new float[coords.length];
    float[] vt = new float[4];
    for (int i = 0; i < coords.length; i += 2) {
        float[] v = {coords[i], coords[i + 1], 0, 1};
        Matrix.multiplyMV(vt, 0, matrix, 0, v, 0);
        result[i] = vt[0];
        result[i + 1] = vt[1];
    }
    return result;
}
Example 11
Source File: GLES20Canvas.java From PhotoMovie with Apache License 2.0
@Override
public void getBounds(Rect bounds, int x, int y, int width, int height) {
    Matrix.translateM(mTempMatrix, 0, mMatrices, mCurrentMatrixIndex, x, y, 0f);
    Matrix.scaleM(mTempMatrix, 0, width, height, 1f);
    Matrix.multiplyMV(mTempMatrix, MATRIX_SIZE, mTempMatrix, 0, BOUNDS_COORDINATES, 0);
    Matrix.multiplyMV(mTempMatrix, MATRIX_SIZE + 4, mTempMatrix, 0, BOUNDS_COORDINATES, 4);
    bounds.left = Math.round(mTempMatrix[MATRIX_SIZE]);
    bounds.right = Math.round(mTempMatrix[MATRIX_SIZE + 4]);
    bounds.top = Math.round(mTempMatrix[MATRIX_SIZE + 1]);
    bounds.bottom = Math.round(mTempMatrix[MATRIX_SIZE + 5]);
    bounds.sort();
}
Example 12
Source File: BoundingBox.java From android-3D-model-viewer with GNU Lesser General Public License v3.0
public static BoundingBox create(String id, FloatBuffer vertexBuffer, float[] modelMatrix) {
    float xMin = Float.MAX_VALUE, xMax = -Float.MAX_VALUE,
            yMin = Float.MAX_VALUE, yMax = -Float.MAX_VALUE,
            zMin = Float.MAX_VALUE, zMax = -Float.MAX_VALUE;
    vertexBuffer = vertexBuffer.asReadOnlyBuffer();
    vertexBuffer.position(0);
    while (vertexBuffer.hasRemaining()) {
        float vertexx = vertexBuffer.get();
        float vertexy = vertexBuffer.get();
        float vertexz = vertexBuffer.get();
        if (vertexx < xMin) {
            xMin = vertexx;
        }
        if (vertexx > xMax) {
            xMax = vertexx;
        }
        if (vertexy < yMin) {
            yMin = vertexy;
        }
        if (vertexy > yMax) {
            yMax = vertexy;
        }
        if (vertexz < zMin) {
            zMin = vertexz;
        }
        if (vertexz > zMax) {
            zMax = vertexz;
        }
    }
    float[] min = new float[]{xMin, yMin, zMin, 1};
    float[] max = new float[]{xMax, yMax, zMax, 1};
    Matrix.multiplyMV(min, 0, modelMatrix, 0, min, 0);
    Matrix.multiplyMV(max, 0, modelMatrix, 0, max, 0);
    return new BoundingBox(id, min[0], max[0], min[1], max[1], min[2], max[2]);
}
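Note that this example passes the same arrays (min and max) as both the input vector and the result vector of multiplyMV. A minimal sketch of an equivalent ending that keeps inputs and outputs in separate arrays, which sidesteps any question about in-place aliasing (the minLocal/maxLocal/minWorld/maxWorld names are illustrative, not from the project):

    // Sketch: same transform as above, but writing into separate output arrays so the
    // input extremes are not overwritten while they are being read.
    float[] minLocal = new float[]{xMin, yMin, zMin, 1};
    float[] maxLocal = new float[]{xMax, yMax, zMax, 1};
    float[] minWorld = new float[4];
    float[] maxWorld = new float[4];
    Matrix.multiplyMV(minWorld, 0, modelMatrix, 0, minLocal, 0);
    Matrix.multiplyMV(maxWorld, 0, modelMatrix, 0, maxLocal, 0);
    return new BoundingBox(id, minWorld[0], maxWorld[0], minWorld[1], maxWorld[1], minWorld[2], maxWorld[2]);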
Example 13
Source File: GLES20Canvas.java From LB-Launcher with Apache License 2.0
@Override
public void getBounds(Rect bounds, int x, int y, int width, int height) {
    Matrix.translateM(mTempMatrix, 0, mMatrices, mCurrentMatrixIndex, x, y, 0f);
    Matrix.scaleM(mTempMatrix, 0, width, height, 1f);
    Matrix.multiplyMV(mTempMatrix, MATRIX_SIZE, mTempMatrix, 0, BOUNDS_COORDINATES, 0);
    Matrix.multiplyMV(mTempMatrix, MATRIX_SIZE + 4, mTempMatrix, 0, BOUNDS_COORDINATES, 4);
    bounds.left = Math.round(mTempMatrix[MATRIX_SIZE]);
    bounds.right = Math.round(mTempMatrix[MATRIX_SIZE + 4]);
    bounds.top = Math.round(mTempMatrix[MATRIX_SIZE + 1]);
    bounds.bottom = Math.round(mTempMatrix[MATRIX_SIZE + 5]);
    bounds.sort();
}
Example 14
Source File: TreasureHuntActivity.java From PanoramaGL with Apache License 2.0
/**
 * Check if user is looking at object by calculating where the object is in eye-space.
 *
 * @return true if the user is looking at the object.
 */
private boolean isLookingAtObject() {
    // Convert object space to camera space. Use the headView from onNewFrame.
    Matrix.multiplyMM(modelView, 0, headView, 0, modelCube, 0);
    Matrix.multiplyMV(tempPosition, 0, modelView, 0, POS_MATRIX_MULTIPLY_VEC, 0);

    float pitch = (float) Math.atan2(tempPosition[1], -tempPosition[2]);
    float yaw = (float) Math.atan2(tempPosition[0], -tempPosition[2]);

    return Math.abs(pitch) < PITCH_LIMIT && Math.abs(yaw) < YAW_LIMIT;
}
Example 15
Source File: MDVector3D.java From Beginner-Level-Android-Studio-Apps with GNU General Public License v3.0
public void multiplyMV(float[] mat) {
    Matrix.multiplyMV(values, 0, mat, 0, values, 0);
}
Example 16
Source File: Camera.java From react-native-3d-model-view with MIT License
public static void multiplyMMV(float[] result, int retOffset, float[] matrix, int matOffset,
                               float[] vector4Matrix, int vecOffset) {
    for (int i = 0; i < vector4Matrix.length / 4; i++) {
        Matrix.multiplyMV(result, retOffset + (i * 4), matrix, matOffset, vector4Matrix, vecOffset + (i * 4));
    }
}
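A hypothetical call site for this batch helper might transform the four corners of a quad in one call (the corner values and the modelViewProjection matrix below are illustrative assumptions, not taken from the project):

    // Sketch: transform four 4-component corner vectors of a unit quad at once.
    float[] corners = {
            0f, 0f, 0f, 1f,
            1f, 0f, 0f, 1f,
            1f, 1f, 0f, 1f,
            0f, 1f, 0f, 1f
    };
    float[] transformed = new float[corners.length];
    Camera.multiplyMMV(transformed, 0, modelViewProjection, 0, corners, 0);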
Example 17
Source File: GeometryLoader.java From android-3D-model-viewer with GNU Lesser General Public License v3.0
private void loadSkinningData(final String geometryId) {
    float[] bindShapeMatrix = null;
    if (skinningDataMap != null && skinningDataMap.containsKey(geometryId)) {
        bindShapeMatrix = skinningDataMap.get(geometryId).getBindShapeMatrix();
    }
    List<VertexSkinData> verticesSkinData = null;
    if (skinningDataMap == null || !skinningDataMap.containsKey(geometryId)) {
        Log.d("GeometryLoader", "No skinning data available");
    } else {
        verticesSkinData = skinningDataMap.get(geometryId).verticesSkinData;
    }
    JointData jointData = null;
    if (skeletonData != null) {
        jointData = skeletonData.getHeadJoint().find(geometryId);
        // FIXME: remove this whole if
        if (jointData == null) {
            jointData = skeletonData.getHeadJoint();
        }
    } else {
        Log.d("GeometryLoader", "No skeleton data available");
    }

    // link vertex to weight data
    for (int i = 0; i < this.vertices.size(); i++) {
        Vertex vertex = this.vertices.get(i);

        // transform vertex according to bind_shape_matrix (trooper is using it)
        if (bindShapeMatrix != null) {
            float[] bindShaped = new float[16];
            float[] positionV = new float[]{vertex.getPosition()[0], vertex.getPosition()[1], vertex.getPosition()[2], 1};
            Matrix.multiplyMV(bindShaped, 0, bindShapeMatrix, 0, positionV, 0);
            // FIXME: this is used by stormtrooper - i think it is skin data not geometry
            vertex.setPosition(new float[]{bindShaped[0], bindShaped[1], bindShaped[2]});
        }

        // skinning data
        VertexSkinData weightsData = null;
        if (verticesSkinData != null) {
            weightsData = verticesSkinData.get(i);
        }
        // FIXME: do we really need this?
        if (weightsData == null && jointData != null) {
            weightsData = new VertexSkinData();
            weightsData.addJointEffect(jointData.index, 1);
            weightsData.limitJointNumber(3);
        }
        vertex.setWeightsData(weightsData);
    }
}
Example 18
Source File: ShapeRenderer.java From ShapesInOpenGLES2.0 with MIT License
@Override
public void onDrawFrame(GL10 glUnused) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // Do a complete rotation every 10 seconds.
    long time = SystemClock.uptimeMillis() % 10000L;
    float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

    // Calculate position of the light. Push into the distance.
    Matrix.setIdentityM(aLightModelMatrix, 0);
    Matrix.translateM(aLightModelMatrix, 0, 0.0f, 0.0f, -5.0f);

    Matrix.multiplyMV(aLightPosInWorldSpace, 0, aLightModelMatrix, 0, aLightPosInModelSpace, 0);
    Matrix.multiplyMV(aLightPosInEyeSpace, 0, aViewMatrix, 0, aLightPosInWorldSpace, 0);

    // Translate the cube into the screen.
    Matrix.setIdentityM(aModelMatrix, 0);
    if (aHeightMap != null) {
        Matrix.translateM(aModelMatrix, 0, 0, 0, -300.5f);
    } else {
        Matrix.translateM(aModelMatrix, 0, 0, 0, -3.5f);
    }

    if (aDeltaX != 0 || aDeltaY != 0) {
        aRotationStatus = false;
    }

    if (aRotationStatus) {
        Matrix.rotateM(aModelMatrix, 0, angleInDegrees, 1.0f, 0.0f, 0.0f);
        Matrix.rotateM(aModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
        Matrix.rotateM(aModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
    }

    // Set a matrix that contains the current rotation.
    Matrix.setIdentityM(aCurrentRotation, 0);
    Matrix.rotateM(aCurrentRotation, 0, aDeltaX, 0.0f, 1.0f, 0.0f);
    Matrix.rotateM(aCurrentRotation, 0, aDeltaY, 1.0f, 0.0f, 0.0f);
    aDeltaX = 0.0f;
    aDeltaY = 0.0f;

    // Multiply the current rotation by the accumulated rotation, and then set the
    // accumulated rotation to the result.
    Matrix.multiplyMM(aTemporaryMatrix, 0, aCurrentRotation, 0, aAccumulatedRotation, 0);
    System.arraycopy(aTemporaryMatrix, 0, aAccumulatedRotation, 0, 16);

    // Rotate the cube taking the overall rotation into account.
    Matrix.multiplyMM(aTemporaryMatrix, 0, aModelMatrix, 0, aAccumulatedRotation, 0);
    System.arraycopy(aTemporaryMatrix, 0, aModelMatrix, 0, 16);

    // This multiplies the view matrix by the model matrix, and stores the result in
    // the MVP matrix (which currently contains model * view).
    Matrix.multiplyMM(aMVPMatrix, 0, aViewMatrix, 0, aModelMatrix, 0);

    // This multiplies the modelview matrix by the projection matrix, and stores the
    // result in the MVP matrix (which now contains model * view * projection).
    Matrix.multiplyMM(aTemporaryMatrix, 0, aProjectionMatrix, 0, aMVPMatrix, 0);
    System.arraycopy(aTemporaryMatrix, 0, aMVPMatrix, 0, 16);

    if (aPoints != null) {
        aPoints.render(aMVPMatrix);
    } else if (aLines != null) {
        aLines.render(aMVPMatrix);
    } else if (aTriangles != null) {
        aTriangles.render(aMVPMatrix);
    } else if (aQuad != null) {
        aQuad.render(aMVPMatrix, aTexture);
    } else if (aCubes != null) {
        aCubes.render(aMVPMatrix, aTexture);
    } else if (aSpheres != null) {
        aSpheres.render(aMVPMatrix);
    } else if (aHeightMap != null) {
        aHeightMap.render(aMVPMatrix);
    }
}
Example 19
Source File: Object3DBuilder.java From android-3D-model-viewer with GNU Lesser General Public License v3.0
private static void buildBones(AnimatedModel animatedModel, Joint joint, float[] parentTransform,
                               float[] parentPoint, int parentJoinIndex, FloatBuffer vertexBuffer) {

    float[] point = new float[4];
    float[] transform = new float[16];
    Matrix.multiplyMM(transform, 0, parentTransform, 0, joint.getBindLocalTransform(), 0);
    Matrix.multiplyMV(point, 0, transform, 0, new float[]{0, 0, 0, 1}, 0);

    float[] v = Math3DUtils.substract(point, parentPoint);
    float[] point1 = new float[]{point[0], point[1], point[2] - Matrix.length(v[0], v[1], v[2]) * 0.05f};
    float[] point2 = new float[]{point[0], point[1], point[2] + Matrix.length(v[0], v[1], v[2]) * 0.05f};

    float[] normal = Math3DUtils.calculateNormal(parentPoint, point1, point2);

    // TODO: remove this
    /*parentPoint = new float[]{vertexBuffer.get((int)(100* Math.random())),vertexBuffer.get((int)(100* Math.random())),vertexBuffer.get((int)(100* Math.random()))};*/

    animatedModel.getVertexArrayBuffer().put(parentPoint[0]);
    animatedModel.getVertexArrayBuffer().put(parentPoint[1]);
    animatedModel.getVertexArrayBuffer().put(parentPoint[2]);
    animatedModel.getVertexArrayBuffer().put(point1[0]);
    animatedModel.getVertexArrayBuffer().put(point1[1]);
    animatedModel.getVertexArrayBuffer().put(point1[2]);
    animatedModel.getVertexArrayBuffer().put(point2[0]);
    animatedModel.getVertexArrayBuffer().put(point2[1]);
    animatedModel.getVertexArrayBuffer().put(point2[2]);

    animatedModel.getVertexNormalsArrayBuffer().put(normal);
    animatedModel.getVertexNormalsArrayBuffer().put(normal);
    animatedModel.getVertexNormalsArrayBuffer().put(normal);

    animatedModel.getJointIds().put(parentJoinIndex);
    animatedModel.getJointIds().put(parentJoinIndex);
    animatedModel.getJointIds().put(parentJoinIndex);
    for (int i = 3; i < 9; i++) {
        animatedModel.getJointIds().put(joint.getIndex());
    }

    for (int i = 0; i < 9; i += 3) {
        animatedModel.getVertexWeights().put(parentJoinIndex >= 0 ? 1 : 0);
        animatedModel.getVertexWeights().put(0);
        animatedModel.getVertexWeights().put(0);
    }

    for (Joint child : joint.getChildren()) {
        buildBones(animatedModel, child, transform, point, joint.getIndex(), vertexBuffer);
    }
}
Example 20
Source File: ARObject.java From geoar-app with Apache License 2.0
@Override
public void render(final float[] projectionMatrix, final float[] viewMatrix,
        final float[] parentMatrix, final float[] lightPosition) {

    /** set the matrices to identity matrix */
    Matrix.setIdentityM(modelMatrix, 0);
    Matrix.setIdentityM(modelViewMatrix, 0);
    Matrix.setIdentityM(mvpMatrix, 0);
    Matrix.setIdentityM(tmpMatrix, 0);

    // TODO i think position[0] must be translated negatively -> Check
    Matrix.translateM(modelMatrix, 0, -newPosition[0], newPosition[1], newPosition[2]);

    if (parentMatrix != null) {
        Matrix.multiplyMM(tmpMatrix, 0, parentMatrix, 0, modelMatrix, 0);
        System.arraycopy(tmpMatrix, 0, modelMatrix, 0, 16);
        Matrix.setIdentityM(tmpMatrix, 0);
    }

    Matrix.multiplyMM(modelViewMatrix, 0, viewMatrix, 0, modelMatrix, 0);
    Matrix.multiplyMM(mvpMatrix, 0, projectionMatrix, 0, modelViewMatrix, 0);

    // TODO XXX FIXME frustum test
    if (newPosition != null) {
        float[] vec = new float[]{0, 0, 0, 1};
        Matrix.multiplyMV(vec, 0, modelMatrix, 0, vec, 0);
        if (!GLESCamera.frustumCulling(vec)) {
            isInFrustum = false;
            return;
        }
        /** object is in Frustum - update screen coordinates */
        isInFrustum = true;
        updateScreenCoordinates();
    }
    // isInFrustum = true;

    // TODO XXX FIXME are just active visualizations called !? -> check
    // for (VisualizationLayer layer : visualizationLayers.values()) {
    for (RenderFeature2 feature : renderFeatures) {
        feature.render(mvpMatrix, modelViewMatrix, lightPosition);
    }
    // }
}