Java Code Examples for com.google.android.gms.vision.face.Face#getHeight()
The following examples show how to use com.google.android.gms.vision.face.Face#getHeight(). Each example is drawn from an open-source project; the source file, project, and license are noted above the code.
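Most of the examples below follow the same pattern: Face#getPosition() gives the top-left corner of the detected face, and getWidth()/getHeight() give the size of its bounding box, so the bottom-right corner is the position plus the size. A minimal sketch of that pattern, assuming only the standard Android graphics classes (the FaceBounds helper itself is illustrative, not taken from any of the projects below):

// Sketch: compute a face's bounding box from getPosition(), getWidth() and getHeight().
// Only the com.google.android.gms.vision.face.Face calls are real API; the helper is illustrative.
import android.graphics.PointF;
import android.graphics.RectF;
import com.google.android.gms.vision.face.Face;

final class FaceBounds {
    static RectF boundingBox(Face face) {
        PointF topLeft = face.getPosition();          // top-left corner of the detected face
        float right = topLeft.x + face.getWidth();    // x + width = right edge
        float bottom = topLeft.y + face.getHeight();  // y + height = bottom edge
        return new RectF(topLeft.x, topLeft.y, right, bottom);
    }
}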
Example 1
Source File: ARFilterActivity.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License
/**
 * Given a face and a facial landmark position,
 * return the coordinates of the landmark if known,
 * or approximated coordinates (based on prior data) if not.
 */
private PointF getLandmarkPosition(Face face, int landmarkId) {
    for (Landmark landmark : face.getLandmarks()) {
        if (landmark.getType() == landmarkId) {
            return landmark.getPosition();
        }
    }

    PointF landmarkPosition = mPreviousLandmarkPositions.get(landmarkId);
    if (landmarkPosition == null) {
        return null;
    }

    float x = face.getPosition().x + (landmarkPosition.x * face.getWidth());
    float y = face.getPosition().y + (landmarkPosition.y * face.getHeight());
    return new PointF(x, y);
}
Example 2
Source File: FaceOverlayView.java From Eye-blink-detector with MIT License
private void drawFaceBox(Canvas canvas, double scale) {
    // This should be defined as a member variable rather than
    // being created on each onDraw request, but left here for
    // emphasis.
    Paint paint = new Paint();
    paint.setColor(Color.GREEN);
    paint.setStyle(Paint.Style.STROKE);
    paint.setStrokeWidth(5);

    float left = 0;
    float top = 0;
    float right = 0;
    float bottom = 0;

    for (int i = 0; i < mFaces.size(); i++) {
        Face face = mFaces.valueAt(i);

        left = (float) (face.getPosition().x * scale);
        top = (float) (face.getPosition().y * scale);
        right = (float) scale * (face.getPosition().x + face.getWidth());
        bottom = (float) scale * (face.getPosition().y + face.getHeight());

        canvas.drawRect(left, top, right, bottom, paint);
    }
}
Example 3
Source File: GooglyFaceTracker.java From android-vision with Apache License 2.0
/**
 * Finds a specific landmark position, or approximates the position based on
 * past observations if it is not present.
 */
private PointF getLandmarkPosition(Face face, int landmarkId) {
    for (Landmark landmark : face.getLandmarks()) {
        if (landmark.getType() == landmarkId) {
            return landmark.getPosition();
        }
    }

    PointF prop = mPreviousProportions.get(landmarkId);
    if (prop == null) {
        return null;
    }

    float x = face.getPosition().x + (prop.x * face.getWidth());
    float y = face.getPosition().y + (prop.y * face.getHeight());
    return new PointF(x, y);
}
Example 4
Source File: FaceOverlayView.java From AndroidDemoProjects with Apache License 2.0
private void drawFaceBox(Canvas canvas, double scale) {
    // This should be defined as a member variable rather than
    // being created on each onDraw request, but left here for
    // emphasis.
    Paint paint = new Paint();
    paint.setColor(Color.GREEN);
    paint.setStyle(Paint.Style.STROKE);
    paint.setStrokeWidth(5);

    float left = 0;
    float top = 0;
    float right = 0;
    float bottom = 0;

    for (int i = 0; i < mFaces.size(); i++) {
        Face face = mFaces.valueAt(i);

        left = (float) (face.getPosition().x * scale);
        top = (float) (face.getPosition().y * scale);
        right = (float) scale * (face.getPosition().x + face.getWidth());
        bottom = (float) scale * (face.getPosition().y + face.getHeight());

        canvas.drawRect(left, top, right, bottom, paint);
    }
}
Example 5
Source File: MyFace.java From flutter_mobile_vision with MIT License
public MyFace(Face face) {
    this.id = face.getId();
    this.x = face.getPosition().x;
    this.y = face.getPosition().y;
    this.width = face.getWidth();
    this.height = face.getHeight();
    this.eulerY = face.getEulerY();
    this.eulerZ = face.getEulerZ();
    this.leftEyeOpenProbability = face.getIsLeftEyeOpenProbability();
    this.rightEyeOpenProbability = face.getIsRightEyeOpenProbability();
    this.smilingProbability = face.getIsSmilingProbability();
}
Example 6
Source File: ARFilterActivity.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License
private void updatePreviousLandmarkPositions(Face face) {
    for (Landmark landmark : face.getLandmarks()) {
        PointF position = landmark.getPosition();
        float xProp = (position.x - face.getPosition().x) / face.getWidth();
        float yProp = (position.y - face.getPosition().y) / face.getHeight();
        mPreviousLandmarkPositions.put(landmark.getType(), new PointF(xProp, yProp));
    }
}
Example 7
Source File: GooglyFaceTracker.java From android-vision with Apache License 2.0
private void updatePreviousProportions(Face face) {
    for (Landmark landmark : face.getLandmarks()) {
        PointF position = landmark.getPosition();
        float xProp = (position.x - face.getPosition().x) / face.getWidth();
        float yProp = (position.y - face.getPosition().y) / face.getHeight();
        mPreviousProportions.put(landmark.getType(), new PointF(xProp, yProp));
    }
}
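Examples 6 and 7 are the storing half of the pattern used in Examples 1 and 3: each landmark's offset from the face's top-left corner is divided by getWidth() and getHeight(), so the saved proportions can later be re-scaled against a face box of a different size. A minimal round-trip sketch of that idea (the class and method names here are illustrative, not from either project):

// Round-trip sketch: store a landmark as a proportion of the face box,
// then recover an absolute position from a (possibly resized) face.
// Only the Face, Landmark and PointF calls are real API; the helper is illustrative.
import android.graphics.PointF;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.Landmark;

final class LandmarkProportions {
    /** Normalize a landmark position to [0, 1] relative to the face box. */
    static PointF toProportion(Face face, Landmark landmark) {
        PointF p = landmark.getPosition();
        float xProp = (p.x - face.getPosition().x) / face.getWidth();
        float yProp = (p.y - face.getPosition().y) / face.getHeight();
        return new PointF(xProp, yProp);
    }

    /** Scale a stored proportion back to absolute coordinates for a new face. */
    static PointF fromProportion(Face face, PointF prop) {
        float x = face.getPosition().x + prop.x * face.getWidth();
        float y = face.getPosition().y + prop.y * face.getHeight();
        return new PointF(x, y);
    }
}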
Example 8
Source File: FaceGraphic.java From Camera2Vision with Apache License 2.0
@Override
public void draw(Canvas canvas) {
    Face face = mFace;
    if (face == null) {
        canvas.drawColor(0, PorterDuff.Mode.CLEAR);
        isSmilingProbability = -1;
        eyeRightOpenProbability = -1;
        eyeLeftOpenProbability = -1;
        return;
    }

    facePosition = new PointF(translateX(face.getPosition().x), translateY(face.getPosition().y));
    faceWidth = face.getWidth() * 4;
    faceHeight = face.getHeight() * 4;
    faceCenter = new PointF(translateX(face.getPosition().x + faceWidth / 8),
            translateY(face.getPosition().y + faceHeight / 8));
    isSmilingProbability = face.getIsSmilingProbability();
    eyeRightOpenProbability = face.getIsRightEyeOpenProbability();
    eyeLeftOpenProbability = face.getIsLeftEyeOpenProbability();
    eulerY = face.getEulerY();
    eulerZ = face.getEulerZ();

    // DO NOT SET TO NULL THE NON EXISTENT LANDMARKS. USE OLDER ONES INSTEAD.
    for (Landmark landmark : face.getLandmarks()) {
        switch (landmark.getType()) {
            case Landmark.LEFT_EYE:
                leftEyePos = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_EYE:
                rightEyePos = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.NOSE_BASE:
                noseBasePos = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_MOUTH:
                leftMouthCorner = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_MOUTH:
                rightMouthCorner = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.BOTTOM_MOUTH:
                mouthBase = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_EAR:
                leftEar = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_EAR:
                rightEar = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_EAR_TIP:
                leftEarTip = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_EAR_TIP:
                rightEarTip = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.LEFT_CHEEK:
                leftCheek = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
            case Landmark.RIGHT_CHEEK:
                rightCheek = new PointF(translateX(landmark.getPosition().x),
                        translateY(landmark.getPosition().y));
                break;
        }
    }

    Paint mPaint = new Paint();
    mPaint.setColor(Color.WHITE);
    mPaint.setStrokeWidth(4);

    if (faceCenter != null) canvas.drawBitmap(marker, faceCenter.x, faceCenter.y, null);
    if (noseBasePos != null) canvas.drawBitmap(marker, noseBasePos.x, noseBasePos.y, null);
    if (leftEyePos != null) canvas.drawBitmap(marker, leftEyePos.x, leftEyePos.y, null);
    if (rightEyePos != null) canvas.drawBitmap(marker, rightEyePos.x, rightEyePos.y, null);
    if (mouthBase != null) canvas.drawBitmap(marker, mouthBase.x, mouthBase.y, null);
    if (leftMouthCorner != null) canvas.drawBitmap(marker, leftMouthCorner.x, leftMouthCorner.y, null);
    if (rightMouthCorner != null) canvas.drawBitmap(marker, rightMouthCorner.x, rightMouthCorner.y, null);
    if (leftEar != null) canvas.drawBitmap(marker, leftEar.x, leftEar.y, null);
    if (rightEar != null) canvas.drawBitmap(marker, rightEar.x, rightEar.y, null);
    if (leftEarTip != null) canvas.drawBitmap(marker, leftEarTip.x, leftEarTip.y, null);
    if (rightEarTip != null) canvas.drawBitmap(marker, rightEarTip.x, rightEarTip.y, null);
    if (leftCheek != null) canvas.drawBitmap(marker, leftCheek.x, leftCheek.y, null);
    if (rightCheek != null) canvas.drawBitmap(marker, rightCheek.x, rightCheek.y, null);
}
Example 9
Source File: FaceDetectionImplGmsCore.java From 365browser with Apache License 2.0
@Override
public void detect(
        SharedBufferHandle frameData, int width, int height, DetectResponse callback) {
    // The vision library will be downloaded the first time the API is used
    // on the device; this happens "fast", but it might have not completed,
    // bail in this case.
    if (!mFaceDetector.isOperational()) {
        Log.e(TAG, "FaceDetector is not operational");

        // Fallback to Android's FaceDetectionImpl.
        FaceDetectorOptions options = new FaceDetectorOptions();
        options.fastMode = mFastMode;
        options.maxDetectedFaces = mMaxFaces;
        FaceDetectionImpl detector = new FaceDetectionImpl(options);
        detector.detect(frameData, width, height, callback);
        return;
    }

    Frame frame = SharedBufferUtils.convertToFrame(frameData, width, height);
    if (frame == null) {
        Log.e(TAG, "Error converting SharedMemory to Frame");
        callback.call(new FaceDetectionResult[0]);
        return;
    }

    final SparseArray<Face> faces = mFaceDetector.detect(frame);

    FaceDetectionResult[] faceArray = new FaceDetectionResult[faces.size()];
    for (int i = 0; i < faces.size(); i++) {
        faceArray[i] = new FaceDetectionResult();

        final Face face = faces.valueAt(i);
        final PointF corner = face.getPosition();
        faceArray[i].boundingBox = new RectF();
        faceArray[i].boundingBox.x = corner.x;
        faceArray[i].boundingBox.y = corner.y;
        faceArray[i].boundingBox.width = face.getWidth();
        faceArray[i].boundingBox.height = face.getHeight();

        final List<Landmark> landmarks = face.getLandmarks();
        ArrayList<org.chromium.shape_detection.mojom.Landmark> mojoLandmarks =
                new ArrayList<org.chromium.shape_detection.mojom.Landmark>(landmarks.size());
        for (int j = 0; j < landmarks.size(); j++) {
            final Landmark landmark = landmarks.get(j);
            final int landmarkType = landmark.getType();
            if (landmarkType == Landmark.LEFT_EYE || landmarkType == Landmark.RIGHT_EYE
                    || landmarkType == Landmark.BOTTOM_MOUTH) {
                org.chromium.shape_detection.mojom.Landmark mojoLandmark =
                        new org.chromium.shape_detection.mojom.Landmark();
                mojoLandmark.location = new org.chromium.gfx.mojom.PointF();
                mojoLandmark.location.x = landmark.getPosition().x;
                mojoLandmark.location.y = landmark.getPosition().y;
                mojoLandmark.type = landmarkType == Landmark.BOTTOM_MOUTH
                        ? LandmarkType.MOUTH
                        : LandmarkType.EYE;
                mojoLandmarks.add(mojoLandmark);
            }
        }
        faceArray[i].landmarks = mojoLandmarks.toArray(
                new org.chromium.shape_detection.mojom.Landmark[mojoLandmarks.size()]);
    }
    callback.call(faceArray);
}
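For context around all of the examples above, here is a minimal sketch of how a FaceDetector is typically built and queried before getWidth() and getHeight() are read from each result. The detector options shown, the Context, and the Bitmap are assumptions for illustration; only the com.google.android.gms.vision classes and calls are the real API.

// Sketch: build a FaceDetector, run it over a Bitmap, and log each face's bounding box.
// Assumes a Context `context` and a Bitmap `bitmap` are already available.
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.util.SparseArray;
import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;

final class DetectSketch {
    static void logFaceBoxes(Context context, Bitmap bitmap) {
        FaceDetector detector = new FaceDetector.Builder(context)
                .setLandmarkType(FaceDetector.ALL_LANDMARKS)  // illustrative option
                .setMode(FaceDetector.FAST_MODE)              // illustrative option
                .build();
        if (!detector.isOperational()) {
            // The native library may still be downloading on first use.
            Log.w("DetectSketch", "FaceDetector is not operational yet");
            detector.release();
            return;
        }

        Frame frame = new Frame.Builder().setBitmap(bitmap).build();
        SparseArray<Face> faces = detector.detect(frame);
        for (int i = 0; i < faces.size(); i++) {
            Face face = faces.valueAt(i);
            Log.d("DetectSketch", "Face at (" + face.getPosition().x + ", "
                    + face.getPosition().y + ") size "
                    + face.getWidth() + " x " + face.getHeight());
        }
        detector.release();
    }
}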