Java Code Examples for android.hardware.Camera#Face
The following examples show how to use android.hardware.Camera#Face.
You can go to the original project or source file by following the link above each example.
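Before any of the listener code below runs, face detection has to be enabled on the camera itself. The sketch below is not taken from the projects above; it assumes an already-opened android.hardware.Camera whose preview has been started, and the helper name enableFaceDetection is only illustrative.

private void enableFaceDetection(final Camera camera) {
    // Not every device supports face detection; check the limit first.
    if (camera.getParameters().getMaxNumDetectedFaces() > 0) {
        camera.setFaceDetectionListener(new Camera.FaceDetectionListener() {
            @Override
            public void onFaceDetection(Camera.Face[] faces, Camera camera) {
                // face.rect, leftEye, rightEye and mouth arrive in the driver's
                // (-1000, -1000) to (1000, 1000) coordinate space; map them to view
                // coordinates as shown in Examples 2, 6 and 7 below.
                Log.d("FaceDetection", "faces detected: " + faces.length);
            }
        });
        // startFaceDetection() may only be called after startPreview().
        camera.startFaceDetection();
    }
}

Call camera.stopFaceDetection() when the callbacks are no longer needed.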
Example 1
Source File: DrawFaceView.java From RairDemo with Apache License 2.0 | 6 votes |
@Override
protected void onDraw(Canvas canvas) {
    super.onDraw(canvas);
    canvas.setMatrix(matrix);
    for (Camera.Face face : faces) {
        if (face == null) {
            break;
        }
        canvas.drawRect(face.rect, paint);
        if (face.leftEye != null) {
            canvas.drawPoint(face.leftEye.x, face.leftEye.y, paint);
        }
        if (face.rightEye != null) {
            canvas.drawPoint(face.rightEye.x, face.rightEye.y, paint);
        }
        if (face.mouth != null) {
            canvas.drawPoint(face.mouth.x, face.mouth.y, paint);
        }
        // The canvas matrix has been rotated, so any text drawn here would rotate with it.
        // canvas.drawText("id:" + face.id + "\nconfidence:" + face.score, face.rect.left, face.rect.bottom + 10, paint);
    }
    if (isClear) {
        canvas.drawColor(Color.WHITE, PorterDuff.Mode.CLEAR);
        isClear = false;
    }
}
Example 2
Source File: MainActivity.java From RairDemo with Apache License 2.0 | 6 votes |
@Override
public void onFaceDetection(Camera.Face[] faces, Camera camera) {
    if (faces.length > 0) {
        Camera.Face face = faces[0];
        Rect rect = face.rect;
        Log.d("FaceDetection", "confidence: " + face.score + " faces detected: " + faces.length
                + " Face 1 Location X: " + rect.centerX() + " Y: " + rect.centerY() + " "
                + rect.left + " " + rect.top + " " + rect.right + " " + rect.bottom);
        Matrix matrix = new Matrix();
        Camera.CameraInfo info = new Camera.CameraInfo();
        // Note: info.facing is only meaningful after Camera.getCameraInfo(cameraId, info)
        // has filled it in; otherwise it defaults to the back camera.
        boolean mirror = (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
        matrix.setScale(mirror ? -1 : 1, 1);
        matrix.postRotate(90);
        // Driver coordinates span -1000..1000, hence the scale by width/2000
        // and the translation by width/2 to map them into view coordinates.
        matrix.postScale(surfaceView.getWidth() / 2000f, surfaceView.getHeight() / 2000f);
        matrix.postTranslate(surfaceView.getWidth() / 2f, surfaceView.getHeight() / 2f);
        faceView.updateFaces(matrix, faces);
        if (safeToTakePicture) {
            takePhoto();
            safeToTakePicture = false;
        }
    } else {
        faceView.removeRect();
        safeToTakePicture = true;
    }
}
Example 3
Source File: ParameterUtils.java From android_9.0.0_r45 with Apache License 2.0 | 5 votes |
/**
 * Convert an api1 face into an active-array based api2 face.
 *
 * <p>Out-of-range scores and ids will be clipped to be within range (with a warning).</p>
 *
 * @param face a non-{@code null} api1 face
 * @param activeArray active array size of the sensor (e.g. max jpeg size)
 * @param zoomData the calculated zoom data corresponding to this request
 *
 * @return a non-{@code null} api2 face
 *
 * @throws NullPointerException if the {@code face} was {@code null}
 */
public static Face convertFaceFromLegacy(Camera.Face face, Rect activeArray,
        ZoomData zoomData) {
    checkNotNull(face, "face must not be null");

    Face api2Face;

    Camera.Area fakeArea = new Camera.Area(face.rect, /*weight*/1);

    WeightedRectangle faceRect =
            convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, fakeArea);

    Point leftEye = face.leftEye, rightEye = face.rightEye, mouth = face.mouth;
    if (leftEye != null && rightEye != null && mouth != null
            && leftEye.x != -2000 && leftEye.y != -2000
            && rightEye.x != -2000 && rightEye.y != -2000
            && mouth.x != -2000 && mouth.y != -2000) {
        // Convert each landmark into active-array coordinates.
        leftEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                leftEye, /*usePreviewCrop*/true);
        rightEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                rightEye, /*usePreviewCrop*/true);
        mouth = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                mouth, /*usePreviewCrop*/true);

        api2Face = faceRect.toFace(face.id, leftEye, rightEye, mouth);
    } else {
        api2Face = faceRect.toFace();
    }

    return api2Face;
}
Example 4
Source File: AndroidCameraAgentImpl.java From Camera2 with Apache License 2.0 | 5 votes |
@Override
public void onFaceDetection(final Camera.Face[] faces, Camera camera) {
    mHandler.post(new Runnable() {
        @Override
        public void run() {
            mCallback.onFaceDetection(faces, mCamera);
        }
    });
}
Example 5
Source File: DrawFaceView.java From RairDemo with Apache License 2.0 | 5 votes |
private void init() {
    paint = new Paint();
    paint.setColor(Color.GREEN);
    paint.setAntiAlias(true);
    paint.setStyle(Paint.Style.STROKE);
    faces = new Camera.Face[]{};
}
Example 6
Source File: FaceDetectorUtil.java From rtmp-rtsp-stream-client-java with Apache License 2.0 | 5 votes |
public FaceParsed camera1Parse(Camera.Face face, View view, PointF scale, int rotation,
        boolean isFrontCamera) {
    // Parse face
    RectF rect = new RectF(face.rect);
    Matrix matrix = new Matrix();
    matrix.setScale(isFrontCamera ? -1 : 1, 1);
    matrix.postRotate(rotation);
    matrix.postScale(view.getWidth() / 2000f, view.getHeight() / 2000f);
    matrix.postTranslate(view.getWidth() / 2f, view.getHeight() / 2f);
    matrix.mapRect(rect);
    return getFace(rect, scale, view);
}
Example 7
Source File: RCTCameraViewFinder.java From react-native-camera-face-detector with MIT License | 5 votes |
@Override
public void onFaceDetection(Camera.Face[] faces, Camera camera) {
    if (faces.length > 0) {
        Matrix matrix = new Matrix();
        boolean frontCamera = (getCameraType() == RCTCameraModule.RCT_CAMERA_TYPE_FRONT);
        int height = getHeight();
        int width = getWidth();
        matrix.setScale(frontCamera ? -1 : 1, 1);
        matrix.postRotate(RCTCamera.getInstance().getOrientation());
        matrix.postScale(width / 2000f, height / 2000f);
        matrix.postTranslate(width / 2f, height / 2f);
        double pixelDensity = getPixelDensity();
        for (Camera.Face face : faces) {
            RectF faceRect = new RectF(face.rect);
            matrix.mapRect(faceRect);
            WritableMap faceEvent = Arguments.createMap();
            faceEvent.putInt("faceID", face.id);
            faceEvent.putBoolean("isFrontCamera", frontCamera);
            faceEvent.putDouble("x", faceRect.left / pixelDensity);
            faceEvent.putDouble("y", faceRect.top / pixelDensity);
            faceEvent.putDouble("h", faceRect.height() / pixelDensity);
            faceEvent.putDouble("w", faceRect.width() / pixelDensity);
            ((ReactContext) getContext())
                    .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
                    .emit("CameraFaceDetected", faceEvent);
        }
    }
}
Example 8
Source File: FaceDetectionRequest.java From RxCamera with MIT License | 5 votes |
@Override
public void onFaceDetection(Camera.Face[] faces, Camera camera) {
    if (subscriber != null && !subscriber.isUnsubscribed() && rxCamera.isOpenCamera()) {
        RxCameraData cameraData = new RxCameraData();
        cameraData.faceList = faces;
        subscriber.onNext(cameraData);
    }
}
Example 9
Source File: LegacyFaceDetectMapper.java From android_9.0.0_r45 with Apache License 2.0 | 4 votes |
/**
 * Update the {@code result} camera metadata map with the new value for the
 * {@code statistics.faces} and {@code statistics.faceDetectMode}.
 *
 * <p>Face detect callbacks are processed in the background, and each call to
 * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
 *
 * <p>If the scene mode was set to {@code FACE_PRIORITY} but face detection is disabled,
 * the camera will still run face detection in the background, but no faces will be reported
 * in the capture result.</p>
 *
 * @param result a non-{@code null} result
 * @param legacyRequest a non-{@code null} request (read-only)
 */
public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
    checkNotNull(result, "result must not be null");
    checkNotNull(legacyRequest, "legacyRequest must not be null");

    Camera.Face[] faces, previousFaces;
    int fdMode;
    boolean fdScenePriority;
    synchronized (mLock) {
        fdMode = mFaceDetectReporting ?
                STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;

        if (mFaceDetectReporting) {
            faces = mFaces;
        } else {
            faces = null;
        }

        fdScenePriority = mFaceDetectScenePriority;

        previousFaces = mFacesPrev;
        mFacesPrev = faces;
    }

    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;

    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray,
            request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);

    List<Face> convertedFaces = new ArrayList<>();
    if (faces != null) {
        for (Camera.Face face : faces) {
            if (face != null) {
                convertedFaces.add(
                        ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
            } else {
                Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
            }
        }
    }

    if (DEBUG && previousFaces != faces) { // Log only in verbose and IF the faces changed
        Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
    }

    result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
    result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);

    // Override scene mode with FACE_PRIORITY if the request was using FACE_PRIORITY
    if (fdScenePriority) {
        result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_FACE_PRIORITY);
    }
}
Example 10
Source File: Camera1ApiManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0 | 4 votes |
@Override
public void onFaceDetection(Camera.Face[] faces, Camera camera) {
    if (faceDetectorCallback != null) faceDetectorCallback.onGetFaces(faces);
}
Example 11
Source File: CameraAgent.java From Camera2 with Apache License 2.0 | 2 votes |
/**
 * Callback for face detection.
 *
 * @param faces Recognized faces in the preview.
 * @param camera The camera which the preview image comes from.
 */
public void onFaceDetection(Camera.Face[] faces, CameraProxy camera);
Example 12
Source File: DrawFaceView.java From RairDemo with Apache License 2.0 | 2 votes |
/**
 * Draw rectangles around the detected faces.
 *
 * @param matrix matrix used to rotate the canvas
 * @param faces  array of detected face data
 */
public void updateFaces(Matrix matrix, Camera.Face[] faces) {
    this.matrix = matrix;
    this.faces = faces;
    invalidate();
}
Example 13
Source File: Camera1ApiManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0
void onGetFaces(Camera.Face[] faces);
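Example 10 shows Camera1ApiManager forwarding detected faces into this callback. Below is a minimal sketch of an implementation that only logs the results; how the callback gets registered with Camera1ApiManager varies between versions of rtmp-rtsp-stream-client-java, so the wiring is left out and should be treated as an assumption.

FaceDetectorCallback faceDetectorCallback = new FaceDetectorCallback() {
    @Override
    public void onGetFaces(Camera.Face[] faces) {
        // Invoked from Camera1ApiManager.onFaceDetection (Example 10) with the raw
        // camera1 results; coordinates are still in driver space, so parse them with
        // FaceDetectorUtil.camera1Parse (Example 6) before drawing.
        if (faces != null && faces.length > 0) {
            Log.d("FaceDetection", "faces: " + faces.length + ", first rect: " + faces[0].rect);
        }
    }
};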