Java Code Examples for com.google.android.gms.vision.face.Face#getPosition()
The following examples show how to use com.google.android.gms.vision.face.Face#getPosition().
Each example is taken from an open source project; the source file, project, and license are noted above it.
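Face#getPosition() returns the top-left corner of the detected face's bounding box as an android.graphics.PointF; together with Face#getWidth() and Face#getHeight() it describes the full box, which is the pattern most of the examples below rely on. A minimal sketch of that pattern (the class and method names here are illustrative, not taken from any of the projects below):

import android.graphics.PointF;
import android.graphics.RectF;

import com.google.android.gms.vision.face.Face;

final class FaceBounds {
    // Illustrative helper: Face#getPosition() is the top-left corner of the
    // face's bounding box, so the box spans from that corner to
    // (x + width, y + height).
    static RectF toBoundingBox(Face face) {
        PointF topLeft = face.getPosition();
        return new RectF(
                topLeft.x,                      // left
                topLeft.y,                      // top
                topLeft.x + face.getWidth(),    // right
                topLeft.y + face.getHeight());  // bottom
    }
}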
Example 1
Source File: ARFilterActivity.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License
/**
 * Given a face and a facial landmark position,
 * return the coordinates of the landmark if known,
 * or approximated coordinates (based on prior data) if not.
 */
private PointF getLandmarkPosition(Face face, int landmarkId) {
    for (Landmark landmark : face.getLandmarks()) {
        if (landmark.getType() == landmarkId) {
            return landmark.getPosition();
        }
    }

    PointF landmarkPosition = mPreviousLandmarkPositions.get(landmarkId);
    if (landmarkPosition == null) {
        return null;
    }

    float x = face.getPosition().x + (landmarkPosition.x * face.getWidth());
    float y = face.getPosition().y + (landmarkPosition.y * face.getHeight());
    return new PointF(x, y);
}
Example 2
Source File: FaceOverlayView.java From Eye-blink-detector with MIT License
private void drawFaceBox(Canvas canvas, double scale) {
    // This should be defined as a member variable rather than
    // being created on each onDraw request, but left here for
    // emphasis.
    Paint paint = new Paint();
    paint.setColor(Color.GREEN);
    paint.setStyle(Paint.Style.STROKE);
    paint.setStrokeWidth(5);

    float left = 0;
    float top = 0;
    float right = 0;
    float bottom = 0;

    for (int i = 0; i < mFaces.size(); i++) {
        Face face = mFaces.valueAt(i);

        left = (float) (face.getPosition().x * scale);
        top = (float) (face.getPosition().y * scale);
        right = (float) scale * (face.getPosition().x + face.getWidth());
        bottom = (float) scale * (face.getPosition().y + face.getHeight());

        canvas.drawRect(left, top, right, bottom, paint);
    }
}
Example 3
Source File: GooglyFaceTracker.java From android-vision with Apache License 2.0
/**
 * Finds a specific landmark position, or approximates the position based on
 * past observations if it is not present.
 */
private PointF getLandmarkPosition(Face face, int landmarkId) {
    for (Landmark landmark : face.getLandmarks()) {
        if (landmark.getType() == landmarkId) {
            return landmark.getPosition();
        }
    }

    PointF prop = mPreviousProportions.get(landmarkId);
    if (prop == null) {
        return null;
    }

    float x = face.getPosition().x + (prop.x * face.getWidth());
    float y = face.getPosition().y + (prop.y * face.getHeight());
    return new PointF(x, y);
}
Example 4
Source File: FaceOverlayView.java From AndroidDemoProjects with Apache License 2.0
private void drawFaceBox(Canvas canvas, double scale) {
    // This should be defined as a member variable rather than
    // being created on each onDraw request, but left here for
    // emphasis.
    Paint paint = new Paint();
    paint.setColor(Color.GREEN);
    paint.setStyle(Paint.Style.STROKE);
    paint.setStrokeWidth(5);

    float left = 0;
    float top = 0;
    float right = 0;
    float bottom = 0;

    for (int i = 0; i < mFaces.size(); i++) {
        Face face = mFaces.valueAt(i);

        left = (float) (face.getPosition().x * scale);
        top = (float) (face.getPosition().y * scale);
        right = (float) scale * (face.getPosition().x + face.getWidth());
        bottom = (float) scale * (face.getPosition().y + face.getHeight());

        canvas.drawRect(left, top, right, bottom, paint);
    }
}
Example 5
Source File: MyFace.java From flutter_mobile_vision with MIT License
public MyFace(Face face) {
    this.id = face.getId();
    this.x = face.getPosition().x;
    this.y = face.getPosition().y;
    this.width = face.getWidth();
    this.height = face.getHeight();
    this.eulerY = face.getEulerY();
    this.eulerZ = face.getEulerZ();
    this.leftEyeOpenProbability = face.getIsLeftEyeOpenProbability();
    this.rightEyeOpenProbability = face.getIsRightEyeOpenProbability();
    this.smilingProbability = face.getIsSmilingProbability();
}
Example 6
Source File: ARFilterActivity.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License
private void updatePreviousLandmarkPositions(Face face) {
    for (Landmark landmark : face.getLandmarks()) {
        PointF position = landmark.getPosition();
        float xProp = (position.x - face.getPosition().x) / face.getWidth();
        float yProp = (position.y - face.getPosition().y) / face.getHeight();
        mPreviousLandmarkPositions.put(landmark.getType(), new PointF(xProp, yProp));
    }
}
Example 7
Source File: GooglyFaceTracker.java From android-vision with Apache License 2.0
private void updatePreviousProportions(Face face) {
    for (Landmark landmark : face.getLandmarks()) {
        PointF position = landmark.getPosition();
        float xProp = (position.x - face.getPosition().x) / face.getWidth();
        float yProp = (position.y - face.getPosition().y) / face.getHeight();
        mPreviousProportions.put(landmark.getType(), new PointF(xProp, yProp));
    }
}
Example 8
Source File: FaceSwap.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License
@Override
public int compare(Face lhs, Face rhs) {
    return (int) (lhs.getPosition().x) - (int) (rhs.getPosition().x);
}
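This Comparator#compare implementation orders faces by the x coordinate of their top-left corner, effectively sorting them left to right across the frame. A minimal usage sketch, assuming the method above belongs to a Comparator<Face> named leftToRightComparator (that name and the surrounding variables are illustrative, not from FaceSwap.java):

// Illustrative: collect the detector's SparseArray<Face> output into a list
// and sort it left-to-right with the comparator shown above.
List<Face> faces = new ArrayList<>();
for (int i = 0; i < detectedFaces.size(); i++) {
    faces.add(detectedFaces.valueAt(i));
}
Collections.sort(faces, leftToRightComparator);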
Example 9
Source File: FaceDetectionImplGmsCore.java From 365browser with Apache License 2.0
@Override
public void detect(
        SharedBufferHandle frameData, int width, int height, DetectResponse callback) {
    // The vision library will be downloaded the first time the API is used
    // on the device; this happens "fast", but it might have not completed,
    // bail in this case.
    if (!mFaceDetector.isOperational()) {
        Log.e(TAG, "FaceDetector is not operational");

        // Fallback to Android's FaceDetectionImpl.
        FaceDetectorOptions options = new FaceDetectorOptions();
        options.fastMode = mFastMode;
        options.maxDetectedFaces = mMaxFaces;
        FaceDetectionImpl detector = new FaceDetectionImpl(options);
        detector.detect(frameData, width, height, callback);
        return;
    }

    Frame frame = SharedBufferUtils.convertToFrame(frameData, width, height);
    if (frame == null) {
        Log.e(TAG, "Error converting SharedMemory to Frame");
        callback.call(new FaceDetectionResult[0]);
        return;
    }

    final SparseArray<Face> faces = mFaceDetector.detect(frame);
    FaceDetectionResult[] faceArray = new FaceDetectionResult[faces.size()];
    for (int i = 0; i < faces.size(); i++) {
        faceArray[i] = new FaceDetectionResult();

        final Face face = faces.valueAt(i);
        final PointF corner = face.getPosition();
        faceArray[i].boundingBox = new RectF();
        faceArray[i].boundingBox.x = corner.x;
        faceArray[i].boundingBox.y = corner.y;
        faceArray[i].boundingBox.width = face.getWidth();
        faceArray[i].boundingBox.height = face.getHeight();

        final List<Landmark> landmarks = face.getLandmarks();
        ArrayList<org.chromium.shape_detection.mojom.Landmark> mojoLandmarks =
                new ArrayList<org.chromium.shape_detection.mojom.Landmark>(landmarks.size());
        for (int j = 0; j < landmarks.size(); j++) {
            final Landmark landmark = landmarks.get(j);
            final int landmarkType = landmark.getType();
            if (landmarkType == Landmark.LEFT_EYE || landmarkType == Landmark.RIGHT_EYE
                    || landmarkType == Landmark.BOTTOM_MOUTH) {
                org.chromium.shape_detection.mojom.Landmark mojoLandmark =
                        new org.chromium.shape_detection.mojom.Landmark();
                mojoLandmark.location = new org.chromium.gfx.mojom.PointF();
                mojoLandmark.location.x = landmark.getPosition().x;
                mojoLandmark.location.y = landmark.getPosition().y;
                mojoLandmark.type = landmarkType == Landmark.BOTTOM_MOUTH
                        ? LandmarkType.MOUTH
                        : LandmarkType.EYE;
                mojoLandmarks.add(mojoLandmark);
            }
        }
        faceArray[i].landmarks = mojoLandmarks.toArray(
                new org.chromium.shape_detection.mojom.Landmark[mojoLandmarks.size()]);
    }
    callback.call(faceArray);
}