com.google.android.gms.vision.face.Landmark Java Examples
The following examples show how to use
com.google.android.gms.vision.face.Landmark.
These examples are drawn from open-source projects; each one lists the source file, the project it comes from, and its license.
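All of the snippets below assume a Mobile Vision FaceDetector built with landmark detection enabled and a Face returned by it. The following sketch shows that basic setup end to end; the variable names (context, bitmap) are placeholders rather than code taken from any of the projects below.

    // Minimal sketch: detect faces in a Bitmap and read out the landmark positions.
    FaceDetector detector = new FaceDetector.Builder(context)
            .setTrackingEnabled(false)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)  // without this, getLandmarks() is empty
            .build();

    if (detector.isOperational()) {
        Frame frame = new Frame.Builder().setBitmap(bitmap).build();
        SparseArray<Face> faces = detector.detect(frame);
        for (int i = 0; i < faces.size(); i++) {
            Face face = faces.valueAt(i);
            for (Landmark landmark : face.getLandmarks()) {
                PointF position = landmark.getPosition();
                Log.d("Landmark", "type=" + landmark.getType()
                        + " x=" + position.x + " y=" + position.y);
            }
        }
    }
    detector.release();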
Example #1
Source File: DetectFacesFunction.java From face-detection-ane with Apache License 2.0

private String getFaceJSON( Face face ) {
    JSONObject json = new JSONObject();
    try {
        json.put( "faceX", face.getPosition().x );
        json.put( "faceY", face.getPosition().y );
        json.put( "faceWidth", face.getWidth() );
        json.put( "faceHeight", face.getHeight() );
        json.put( "leftEyeOpenProbability", face.getIsLeftEyeOpenProbability() );
        json.put( "rightEyeOpenProbability", face.getIsRightEyeOpenProbability() );
        json.put( "isSmilingProbability", face.getIsSmilingProbability() );
        List<Landmark> landmarks = face.getLandmarks();
        for( Landmark landmark : landmarks ) {
            addLandmark( landmark, json );
        }
    } catch( JSONException e ) {
        e.printStackTrace();
        return null;
    }
    return json.toString();
}
Example #2
Source File: FaceOverlayView.java From AndroidDemoProjects with Apache License 2.0

private void drawFaceLandmarks( Canvas canvas, double scale ) {
    Paint paint = new Paint();
    paint.setColor( Color.GREEN );
    paint.setStyle( Paint.Style.STROKE );
    paint.setStrokeWidth( 5 );

    for( int i = 0; i < mFaces.size(); i++ ) {
        Face face = mFaces.valueAt(i);
        for ( Landmark landmark : face.getLandmarks() ) {
            int cx = (int) ( landmark.getPosition().x * scale );
            int cy = (int) ( landmark.getPosition().y * scale );
            canvas.drawCircle( cx, cy, 10, paint );
        }
    }
}
Example #3
Source File: FaceView.java From android-vision with Apache License 2.0

/**
 * Draws a small circle for each detected landmark, centered at the detected landmark position.
 * <p>
 * Note that eye landmarks are defined to be the midpoint between the detected eye corner
 * positions, which tends to place the eye landmarks at the lower eyelid rather than at the
 * pupil position.
 */
private void drawFaceAnnotations(Canvas canvas, double scale) {
    Paint paint = new Paint();
    paint.setColor(Color.GREEN);
    paint.setStyle(Paint.Style.STROKE);
    paint.setStrokeWidth(5);

    for (int i = 0; i < mFaces.size(); ++i) {
        Face face = mFaces.valueAt(i);
        for (Landmark landmark : face.getLandmarks()) {
            int cx = (int) (landmark.getPosition().x * scale);
            int cy = (int) (landmark.getPosition().y * scale);
            canvas.drawCircle(cx, cy, 10, paint);
        }
    }
}
Example #4
Source File: GooglyFaceTracker.java From android-vision with Apache License 2.0

/**
 * Finds a specific landmark position, or approximates the position based on past observations
 * if it is not present.
 */
private PointF getLandmarkPosition(Face face, int landmarkId) {
    for (Landmark landmark : face.getLandmarks()) {
        if (landmark.getType() == landmarkId) {
            return landmark.getPosition();
        }
    }

    PointF prop = mPreviousProportions.get(landmarkId);
    if (prop == null) {
        return null;
    }

    float x = face.getPosition().x + (prop.x * face.getWidth());
    float y = face.getPosition().y + (prop.y * face.getHeight());
    return new PointF(x, y);
}
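The mPreviousProportions field is not shown in this snippet; Example #10 below refreshes it with each landmark's position expressed as a proportion of the face bounding box. A map keyed by landmark type is enough, for instance (a sketch, not necessarily the project's exact declaration):

    // Cache of landmark positions as proportions of the face box, keyed by Landmark type.
    private final Map<Integer, PointF> mPreviousProportions = new HashMap<>();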
Example #5
Source File: DetectFacesFunction.java From face-detection-ane with Apache License 2.0

private String getLandmarkKey( int landmarkType ) {
    switch( landmarkType ) {
        case Landmark.BOTTOM_MOUTH: return "mouth";
        case Landmark.LEFT_EYE: return "leftEye";
        case Landmark.RIGHT_EYE: return "rightEye";
        case Landmark.LEFT_EAR: return "leftEar";
        case Landmark.LEFT_EAR_TIP: return "leftEarTip";
        case Landmark.LEFT_CHEEK: return "leftCheek";
        case Landmark.LEFT_MOUTH: return "leftMouth";
        case Landmark.RIGHT_EAR: return "rightEar";
        case Landmark.RIGHT_EAR_TIP: return "rightEarTip";
        case Landmark.RIGHT_CHEEK: return "rightCheek";
        case Landmark.RIGHT_MOUTH: return "rightMouth";
        case Landmark.NOSE_BASE: return "noseBase";
        default: return null;
    }
}
Example #6
Source File: FaceOverlayView.java From Eye-blink-detector with MIT License

private void drawFaceLandmarks( Canvas canvas, double scale ) {
    Paint paint = new Paint();
    paint.setColor( Color.GREEN );
    paint.setStyle( Paint.Style.STROKE );
    paint.setStrokeWidth( 5 );

    for( int i = 0; i < mFaces.size(); i++ ) {
        Face face = mFaces.valueAt(i);
        for ( Landmark landmark : face.getLandmarks() ) {
            int cx = (int) ( landmark.getPosition().x * scale );
            int cy = (int) ( landmark.getPosition().y * scale );
            canvas.drawCircle( cx, cy, 10, paint );
        }
    }
}
Example #7
Source File: ARFilterActivity.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License

/**
 * Given a face and a facial landmark position, return the coordinates of the landmark
 * if known, or approximated coordinates (based on prior data) if not.
 */
private PointF getLandmarkPosition(Face face, int landmarkId) {
    for (Landmark landmark : face.getLandmarks()) {
        if (landmark.getType() == landmarkId) {
            return landmark.getPosition();
        }
    }

    PointF landmarkPosition = mPreviousLandmarkPositions.get(landmarkId);
    if (landmarkPosition == null) {
        return null;
    }

    float x = face.getPosition().x + (landmarkPosition.x * face.getWidth());
    float y = face.getPosition().y + (landmarkPosition.y * face.getHeight());
    return new PointF(x, y);
}
Example #8
Source File: DetectFacesFunction.java From face-detection-ane with Apache License 2.0

private void addLandmark( Landmark landmark, JSONObject json ) throws JSONException {
    int landmarkType = landmark.getType();
    String landmarkKey = getLandmarkKey( landmarkType );
    if( landmarkKey != null ) {
        json.put( landmarkKey + "X", landmark.getPosition().x );
        json.put( landmarkKey + "Y", landmark.getPosition().y );
    }
}
Example #9
Source File: GooglyFaceTracker.java From android-vision with Apache License 2.0

/**
 * Updates the positions and state of eyes to the underlying graphic, according to the most
 * recent face detection results. The graphic will render the eyes and simulate the motion of
 * the iris based upon these changes over time.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    mOverlay.add(mEyesGraphic);
    updatePreviousProportions(face);

    PointF leftPosition = getLandmarkPosition(face, Landmark.LEFT_EYE);
    PointF rightPosition = getLandmarkPosition(face, Landmark.RIGHT_EYE);

    float leftOpenScore = face.getIsLeftEyeOpenProbability();
    boolean isLeftOpen;
    if (leftOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        isLeftOpen = mPreviousIsLeftOpen;
    } else {
        isLeftOpen = (leftOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsLeftOpen = isLeftOpen;
    }

    float rightOpenScore = face.getIsRightEyeOpenProbability();
    boolean isRightOpen;
    if (rightOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        isRightOpen = mPreviousIsRightOpen;
    } else {
        isRightOpen = (rightOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsRightOpen = isRightOpen;
    }

    mEyesGraphic.updateEyes(leftPosition, isLeftOpen, rightPosition, isRightOpen);
}
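The eye-open probabilities used here are only computed when the detector is built with classifications enabled; otherwise they stay at Face.UNCOMPUTED_PROBABILITY, and getLandmarks() is empty unless landmarks are enabled. The sketch below shows the detector configuration this Tracker-style code assumes; the MultiProcessor wiring and the GooglyFaceTracker constructor argument are placeholders, not the sample's exact code.

    FaceDetector detector = new FaceDetector.Builder(context)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)             // required for getLandmarks()
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS) // required for eye/smile probabilities
            .setTrackingEnabled(true)                                // required for per-face Trackers
            .build();

    // Route each tracked face to its own GooglyFaceTracker instance (constructor argument is a guess).
    detector.setProcessor(new MultiProcessor.Builder<Face>(
            new MultiProcessor.Factory<Face>() {
                @Override
                public Tracker<Face> create(Face face) {
                    return new GooglyFaceTracker(graphicOverlay);
                }
            }).build());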
Example #10
Source File: GooglyFaceTracker.java From android-vision with Apache License 2.0

private void updatePreviousProportions(Face face) {
    for (Landmark landmark : face.getLandmarks()) {
        PointF position = landmark.getPosition();
        float xProp = (position.x - face.getPosition().x) / face.getWidth();
        float yProp = (position.y - face.getPosition().y) / face.getHeight();
        mPreviousProportions.put(landmark.getType(), new PointF(xProp, yProp));
    }
}
Example #11
Source File: ARFilterActivity.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License

private void updatePreviousLandmarkPositions(Face face) {
    for (Landmark landmark : face.getLandmarks()) {
        PointF position = landmark.getPosition();
        float xProp = (position.x - face.getPosition().x) / face.getWidth();
        float yProp = (position.y - face.getPosition().y) / face.getHeight();
        mPreviousLandmarkPositions.put(landmark.getType(), new PointF(xProp, yProp));
    }
}
Example #12
Source File: FaceGraphic.java From Camera2Vision with Apache License 2.0

@Override
public void draw(Canvas canvas) {
    Face face = mFace;
    if(face == null) {
        canvas.drawColor(0, PorterDuff.Mode.CLEAR);
        isSmilingProbability = -1;
        eyeRightOpenProbability = -1;
        eyeLeftOpenProbability = -1;
        return;
    }

    facePosition = new PointF(translateX(face.getPosition().x), translateY(face.getPosition().y));
    faceWidth = face.getWidth() * 4;
    faceHeight = face.getHeight() * 4;
    faceCenter = new PointF(translateX(face.getPosition().x + faceWidth/8), translateY(face.getPosition().y + faceHeight/8));
    isSmilingProbability = face.getIsSmilingProbability();
    eyeRightOpenProbability = face.getIsRightEyeOpenProbability();
    eyeLeftOpenProbability = face.getIsLeftEyeOpenProbability();
    eulerY = face.getEulerY();
    eulerZ = face.getEulerZ();

    // DO NOT SET TO NULL THE NON EXISTENT LANDMARKS. USE OLDER ONES INSTEAD.
    for(Landmark landmark : face.getLandmarks()) {
        switch (landmark.getType()) {
            case Landmark.LEFT_EYE:
                leftEyePos = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_EYE:
                rightEyePos = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.NOSE_BASE:
                noseBasePos = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_MOUTH:
                leftMouthCorner = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_MOUTH:
                rightMouthCorner = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.BOTTOM_MOUTH:
                mouthBase = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_EAR:
                leftEar = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_EAR:
                rightEar = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_EAR_TIP:
                leftEarTip = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_EAR_TIP:
                rightEarTip = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_CHEEK:
                leftCheek = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_CHEEK:
                rightCheek = new PointF(translateX(landmark.getPosition().x), translateY(landmark.getPosition().y));
                break;
        }
    }

    Paint mPaint = new Paint();
    mPaint.setColor(Color.WHITE);
    mPaint.setStrokeWidth(4);

    if(faceCenter != null) canvas.drawBitmap(marker, faceCenter.x, faceCenter.y, null);
    if(noseBasePos != null) canvas.drawBitmap(marker, noseBasePos.x, noseBasePos.y, null);
    if(leftEyePos != null) canvas.drawBitmap(marker, leftEyePos.x, leftEyePos.y, null);
    if(rightEyePos != null) canvas.drawBitmap(marker, rightEyePos.x, rightEyePos.y, null);
    if(mouthBase != null) canvas.drawBitmap(marker, mouthBase.x, mouthBase.y, null);
    if(leftMouthCorner != null) canvas.drawBitmap(marker, leftMouthCorner.x, leftMouthCorner.y, null);
    if(rightMouthCorner != null) canvas.drawBitmap(marker, rightMouthCorner.x, rightMouthCorner.y, null);
    if(leftEar != null) canvas.drawBitmap(marker, leftEar.x, leftEar.y, null);
    if(rightEar != null) canvas.drawBitmap(marker, rightEar.x, rightEar.y, null);
    if(leftEarTip != null) canvas.drawBitmap(marker, leftEarTip.x, leftEarTip.y, null);
    if(rightEarTip != null) canvas.drawBitmap(marker, rightEarTip.x, rightEarTip.y, null);
    if(leftCheek != null) canvas.drawBitmap(marker, leftCheek.x, leftCheek.y, null);
    if(rightCheek != null) canvas.drawBitmap(marker, rightCheek.x, rightCheek.y, null);
}
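translateX() and translateY() are not part of the Mobile Vision API; in camera-preview samples like this one they typically come from a GraphicOverlay.Graphic base class that scales, and for the front-facing camera mirrors, detector coordinates into view coordinates. Roughly, as a sketch of the idea only (field names are placeholders, not this project's code):

    // Rough sketch of the coordinate mapping such Graphic helpers perform.
    float translateX(float x) {
        float scaledX = x * widthScaleFactor;                          // preview pixels -> view pixels
        return mirrorHorizontally ? overlayWidth - scaledX : scaledX;  // mirror for the front camera
    }

    float translateY(float y) {
        return y * heightScaleFactor;
    }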
Example #13
Source File: FaceRecognitionObject.java From MagicalCamera with Apache License 2.0

public List<Landmark> getListLandMarkPhoto() {
    return listLandMarkPhoto;
}
Example #14
Source File: FaceRecognitionObject.java From MagicalCamera with Apache License 2.0

public void setListLandMarkPhoto(List<Landmark> listLandMarkPhoto) {
    this.listLandMarkPhoto = listLandMarkPhoto;
}
Example #15
Source File: PhotoFace.java From TelePlus-Android with GNU General Public License v2.0

public PhotoFace(Face face, Bitmap sourceBitmap, Size targetSize, boolean sideward) {
    List<Landmark> landmarks = face.getLandmarks();

    Point leftEyePoint = null;
    Point rightEyePoint = null;
    Point leftMouthPoint = null;
    Point rightMouthPoint = null;

    for (Landmark landmark : landmarks) {
        PointF point = landmark.getPosition();
        switch (landmark.getType()) {
            case Landmark.LEFT_EYE: {
                leftEyePoint = transposePoint(point, sourceBitmap, targetSize, sideward);
            } break;
            case Landmark.RIGHT_EYE: {
                rightEyePoint = transposePoint(point, sourceBitmap, targetSize, sideward);
            } break;
            case Landmark.LEFT_MOUTH: {
                leftMouthPoint = transposePoint(point, sourceBitmap, targetSize, sideward);
            } break;
            case Landmark.RIGHT_MOUTH: {
                rightMouthPoint = transposePoint(point, sourceBitmap, targetSize, sideward);
            } break;
        }
    }

    if (leftEyePoint != null && rightEyePoint != null) {
        eyesCenterPoint = new Point(0.5f * leftEyePoint.x + 0.5f * rightEyePoint.x, 0.5f * leftEyePoint.y + 0.5f * rightEyePoint.y);
        eyesDistance = (float)Math.hypot(rightEyePoint.x - leftEyePoint.x, rightEyePoint.y - leftEyePoint.y);
        angle = (float)Math.toDegrees(Math.PI + Math.atan2(rightEyePoint.y - leftEyePoint.y, rightEyePoint.x - leftEyePoint.x));
        width = eyesDistance * 2.35f;

        float foreheadHeight = 0.8f * eyesDistance;
        float upAngle = (float)Math.toRadians(angle - 90);
        foreheadPoint = new Point(eyesCenterPoint.x + foreheadHeight * (float)Math.cos(upAngle), eyesCenterPoint.y + foreheadHeight * (float)Math.sin(upAngle));
    }

    if (leftMouthPoint != null && rightMouthPoint != null) {
        mouthPoint = new Point(0.5f * leftMouthPoint.x + 0.5f * rightMouthPoint.x, 0.5f * leftMouthPoint.y + 0.5f * rightMouthPoint.y);

        float chinDepth = 0.7f * eyesDistance;
        float downAngle = (float)Math.toRadians(angle + 90);
        chinPoint = new Point(mouthPoint.x + chinDepth * (float)Math.cos(downAngle), mouthPoint.y + chinDepth * (float)Math.sin(downAngle));
    }
}
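The float-valued Point and Size types used here are the project's own helper classes, not android.graphics.Point (which is integer-based). Their assumed shape, shown only for context:

    // Assumed shape of the project-local helpers referenced above (an illustration, not the project's code).
    public class Point {
        public float x;
        public float y;

        public Point(float x, float y) {
            this.x = x;
            this.y = y;
        }
    }
    // Size is assumed to carry the target dimensions the same way, e.g. float width/height fields.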
Example #16
Source File: FaceDetectionImplGmsCore.java From 365browser with Apache License 2.0

@Override
public void detect(
        SharedBufferHandle frameData, int width, int height, DetectResponse callback) {
    // The vision library will be downloaded the first time the API is used
    // on the device; this happens "fast", but it might have not completed,
    // bail in this case.
    if (!mFaceDetector.isOperational()) {
        Log.e(TAG, "FaceDetector is not operational");

        // Fallback to Android's FaceDetectionImpl.
        FaceDetectorOptions options = new FaceDetectorOptions();
        options.fastMode = mFastMode;
        options.maxDetectedFaces = mMaxFaces;
        FaceDetectionImpl detector = new FaceDetectionImpl(options);
        detector.detect(frameData, width, height, callback);
        return;
    }

    Frame frame = SharedBufferUtils.convertToFrame(frameData, width, height);
    if (frame == null) {
        Log.e(TAG, "Error converting SharedMemory to Frame");
        callback.call(new FaceDetectionResult[0]);
        return;
    }

    final SparseArray<Face> faces = mFaceDetector.detect(frame);

    FaceDetectionResult[] faceArray = new FaceDetectionResult[faces.size()];
    for (int i = 0; i < faces.size(); i++) {
        faceArray[i] = new FaceDetectionResult();
        final Face face = faces.valueAt(i);
        final PointF corner = face.getPosition();

        faceArray[i].boundingBox = new RectF();
        faceArray[i].boundingBox.x = corner.x;
        faceArray[i].boundingBox.y = corner.y;
        faceArray[i].boundingBox.width = face.getWidth();
        faceArray[i].boundingBox.height = face.getHeight();

        final List<Landmark> landmarks = face.getLandmarks();
        ArrayList<org.chromium.shape_detection.mojom.Landmark> mojoLandmarks =
                new ArrayList<org.chromium.shape_detection.mojom.Landmark>(landmarks.size());
        for (int j = 0; j < landmarks.size(); j++) {
            final Landmark landmark = landmarks.get(j);
            final int landmarkType = landmark.getType();
            if (landmarkType == Landmark.LEFT_EYE || landmarkType == Landmark.RIGHT_EYE
                    || landmarkType == Landmark.BOTTOM_MOUTH) {
                org.chromium.shape_detection.mojom.Landmark mojoLandmark =
                        new org.chromium.shape_detection.mojom.Landmark();
                mojoLandmark.location = new org.chromium.gfx.mojom.PointF();
                mojoLandmark.location.x = landmark.getPosition().x;
                mojoLandmark.location.y = landmark.getPosition().y;
                mojoLandmark.type = landmarkType == Landmark.BOTTOM_MOUTH
                        ? LandmarkType.MOUTH
                        : LandmarkType.EYE;
                mojoLandmarks.add(mojoLandmark);
            }
        }
        faceArray[i].landmarks = mojoLandmarks.toArray(
                new org.chromium.shape_detection.mojom.Landmark[mojoLandmarks.size()]);
    }
    callback.call(faceArray);
}
Example #17
Source File: ARFilterActivity.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License

@Override
public void onUpdate(FaceDetector.Detections detectionResults, Face face) {
    mOverlay.add(mFaceGraphic);
    updatePreviousLandmarkPositions(face);

    // Get head angles.
    mFaceData.setEulerY(face.getEulerY());
    mFaceData.setEulerZ(face.getEulerZ());

    // Get face dimensions.
    mFaceData.setPosition(face.getPosition());
    mFaceData.setWidth(face.getWidth());
    mFaceData.setHeight(face.getHeight());

    // Get the positions of facial landmarks.
    // Note: the cheek and ear positions below are all passed to setMouthBottomPosition(),
    // so each of those calls overwrites the previous one; only the final BOTTOM_MOUTH
    // assignment is retained.
    mFaceData.setLeftEyePosition(getLandmarkPosition(face, Landmark.LEFT_EYE));
    mFaceData.setRightEyePosition(getLandmarkPosition(face, Landmark.RIGHT_EYE));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.LEFT_CHEEK));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.RIGHT_CHEEK));
    mFaceData.setNoseBasePosition(getLandmarkPosition(face, Landmark.NOSE_BASE));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.LEFT_EAR));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.LEFT_EAR_TIP));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.RIGHT_EAR));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.RIGHT_EAR_TIP));
    mFaceData.setMouthLeftPosition(getLandmarkPosition(face, Landmark.LEFT_MOUTH));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.BOTTOM_MOUTH));
    mFaceData.setMouthRightPosition(getLandmarkPosition(face, Landmark.RIGHT_MOUTH));

    // 1: eye-open state, falling back to the previous value when the score is uncomputed.
    final float EYE_CLOSED_THRESHOLD = 0.4f;
    float leftOpenScore = face.getIsLeftEyeOpenProbability();
    if (leftOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        mFaceData.setLeftEyeOpen(mPreviousIsLeftEyeOpen);
    } else {
        mFaceData.setLeftEyeOpen(leftOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsLeftEyeOpen = mFaceData.isLeftEyeOpen();
    }

    float rightOpenScore = face.getIsRightEyeOpenProbability();
    if (rightOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        mFaceData.setRightEyeOpen(mPreviousIsRightEyeOpen);
    } else {
        mFaceData.setRightEyeOpen(rightOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsRightEyeOpen = mFaceData.isRightEyeOpen();
    }

    // 2: determine whether the person is smiling.
    final float SMILING_THRESHOLD = 0.8f;
    mFaceData.setSmiling(face.getIsSmilingProbability() > SMILING_THRESHOLD);

    mFaceGraphic.update(mFaceData);
}