Java Code Examples for android.hardware.camera2.CameraCharacteristics#get()
The following examples show how to use android.hardware.camera2.CameraCharacteristics#get().
Each example is taken from an open-source project; the header above it names the source file, the project, and its license.
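Before the project examples, here is a minimal sketch (not taken from any of the projects below) of the typical CameraCharacteristics#get() pattern: enumerate the cameras, fetch their characteristics, and read individual keys. The class name CameraInfoLogger and the TAG constant are illustrative only.

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.util.Log;

public class CameraInfoLogger {
    private static final String TAG = "CameraInfoLogger"; // illustrative tag

    /** Logs the lens facing and sensor orientation reported by each camera. */
    public static void logCameraInfo(Context context) {
        CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                // get() returns null when a key is not supported, so use boxed types and null-check.
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                Integer orientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
                Log.i(TAG, "Camera " + cameraId + ": facing=" + facing
                        + ", sensorOrientation=" + orientation);
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "Unable to query camera characteristics", e);
        }
    }
}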
Example 1
Source File: Helper.java From VIA-AI with MIT License | 8 votes |
public void forceScanAllCameras(Activity activity) {
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        String[] idList = manager.getCameraIdList();
        int maxCameraCnt = idList.length;
        for (int index = 0; index < maxCameraCnt; index++) {
            String cameraId = idList[index];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
Example 2
Source File: Camera2ApiManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0 | 6 votes |
/**
 * @required: <uses-permission android:name="android.permission.FLASHLIGHT"/>
 */
public void enableLantern() throws Exception {
    CameraCharacteristics characteristics = getCameraCharacteristics();
    if (characteristics == null) return;
    if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) {
        if (builderInputSurface != null) {
            try {
                builderInputSurface.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
                cameraCaptureSession.setRepeatingRequest(builderInputSurface.build(),
                        faceDetectionEnabled ? cb : null, null);
                lanternEnable = true;
            } catch (Exception e) {
                Log.e(TAG, "Error", e);
            }
        }
    } else {
        Log.e(TAG, "Lantern unsupported");
        throw new Exception("Lantern unsupported");
    }
}
Example 3
Source File: Camera2ApiManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0 | 6 votes |
@SuppressLint("MissingPermission") public void openCameraId(Integer cameraId) { this.cameraId = cameraId; if (prepared) { HandlerThread cameraHandlerThread = new HandlerThread(TAG + " Id = " + cameraId); cameraHandlerThread.start(); cameraHandler = new Handler(cameraHandlerThread.getLooper()); try { cameraManager.openCamera(cameraId.toString(), this, cameraHandler); CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(Integer.toString(cameraId)); running = true; isFrontCamera = (LENS_FACING_FRONT == cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)); if (cameraCallbacks != null) { cameraCallbacks.onCameraChanged(isFrontCamera); } } catch (CameraAccessException | SecurityException e) { Log.e(TAG, "Error", e); } } else { Log.e(TAG, "Camera2ApiManager need be prepared, Camera2ApiManager not enabled"); } }
Example 4
Source File: Camera2ApiManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0 | 6 votes |
/**
 * @required: <uses-permission android:name="android.permission.FLASHLIGHT"/>
 */
public void disableLantern() {
    CameraCharacteristics characteristics = getCameraCharacteristics();
    if (characteristics == null) return;
    if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) {
        if (builderInputSurface != null) {
            try {
                builderInputSurface.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
                cameraCaptureSession.setRepeatingRequest(builderInputSurface.build(),
                        faceDetectionEnabled ? cb : null, null);
                lanternEnable = false;
            } catch (Exception e) {
                Log.e(TAG, "Error", e);
            }
        }
    }
}
Example 5
Source File: Camera2Helper.java From DeviceConnect-Android with MIT License | 6 votes |
/**
 * Gets the list of preview sizes supported by the camera device with the given camera ID.
 *
 * @param cameraManager the camera manager
 * @param cameraId the camera ID
 * @return the list of supported preview sizes
 */
@NonNull
public static List<Size> getSupportedPreviewSizes(final CameraManager cameraManager, final String cameraId) {
    List<Size> previewSizes = new ArrayList<>();
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map != null) {
            previewSizes = Arrays.asList(map.getOutputSizes(SurfaceTexture.class));
            Collections.sort(previewSizes, SizeComparator);
        }
    } catch (CameraAccessException e) {
        // ignore.
    }
    return previewSizes;
}
Example 6
Source File: Camera2ApiManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0 | 6 votes |
public void enableFaceDetection(FaceDetectorCallback faceDetectorCallback) {
    CameraCharacteristics characteristics = getCameraCharacteristics();
    if (characteristics == null) return;
    int[] fd = characteristics.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
    int maxFD = characteristics.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT);
    if (fd.length > 0) {
        List<Integer> fdList = new ArrayList<>();
        for (int FaceD : fd) {
            fdList.add(FaceD);
        }
        if (maxFD > 0) {
            this.faceDetectorCallback = faceDetectorCallback;
            faceDetectionEnabled = true;
            faceDetectionMode = Collections.max(fdList);
            setFaceDetect(builderInputSurface, faceDetectionMode);
            prepareFaceDetectionCallback();
        } else {
            Log.e(TAG, "No face detection");
        }
    } else {
        Log.e(TAG, "No face detection");
    }
}
Example 7
Source File: CameraConnectionFragment.java From dbclf with Apache License 2.0 | 5 votes |
/**
 * Sets up member variables related to camera.
 */
private void setUpCameraOutputs() {
    final Activity activity = getActivity();
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        final StreamConfigurationMap map =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);

        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                inputSize.getWidth(), inputSize.getHeight());

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        final int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
        } else {
            textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
        }
    } catch (final CameraAccessException ignored) {
    } catch (final NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        throw new RuntimeException(getString(R.string.camera_error));
    }
    cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
}
Example 8
Source File: AutoFocusHelper.java From Camera2 with Apache License 2.0 | 5 votes |
/**
 * Calculates sensor crop region for a zoom level (zoom >= 1.0).
 *
 * @return Crop region.
 */
public static Rect cropRegionForZoom(CameraCharacteristics characteristics, float zoom) {
    Rect sensor = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    int xCenter = sensor.width() / 2;
    int yCenter = sensor.height() / 2;
    int xDelta = (int) (0.5f * sensor.width() / zoom);
    int yDelta = (int) (0.5f * sensor.height() / zoom);
    return new Rect(xCenter - xDelta, yCenter - yDelta, xCenter + xDelta, yCenter + yDelta);
}
Example 9
Source File: CameraHandler.java From sample-tensorflow-imageclassifier with Apache License 2.0 | 5 votes |
/**
 * Initialize the camera device
 */
@SuppressLint("MissingPermission")
public void initializeCamera(Context context, Handler backgroundHandler, Size minSize,
        ImageReader.OnImageAvailableListener imageAvailableListener) throws CameraAccessException {
    if (initialized) {
        throw new IllegalStateException("CameraHandler is already initialized or is initializing");
    }
    initialized = true;

    // Discover the camera instance
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    String camId = getCameraId(context);

    // Initialize the image processor with the largest available size.
    CameraCharacteristics characteristics = manager.getCameraCharacteristics(camId);
    StreamConfigurationMap map = characteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size bestSize = getBestCameraSize(map.getOutputSizes(ImageFormat.JPEG), minSize);
    if (bestSize == null) {
        throw new RuntimeException("We could not find a camera resolution that is larger than "
                + minSize.getWidth() + "x" + minSize.getHeight());
    }
    mImageReader = ImageReader.newInstance(bestSize.getWidth(), bestSize.getHeight(),
            ImageFormat.JPEG, MAX_IMAGES);
    mImageDimensions = bestSize;
    Log.d(TAG, "Will capture photos that are " + mImageDimensions.getWidth() + " x "
            + mImageDimensions.getHeight());
    mImageReader.setOnImageAvailableListener(imageAvailableListener, backgroundHandler);

    // Open the camera resource
    try {
        manager.openCamera(camId, mStateCallback, backgroundHandler);
    } catch (CameraAccessException cae) {
        Log.e(TAG, "Camera access exception", cae);
    }
}
Example 10
Source File: CustomVideoCapturerCamera2.java From opentok-android-sdk-samples with MIT License | 5 votes |
public CameraInfoCache(CameraCharacteristics info) {
    this.info = info;
    /* It's actually faster to cache these results than to always look them up,
       and since they are queried every frame... */
    frontFacing = info.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
    sensorOrientation = info.get(CameraCharacteristics.SENSOR_ORIENTATION).intValue();
}
Example 11
Source File: BaseCameraActivity.java From fritz-examples with MIT License | 5 votes |
private String chooseCamera() {
    final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        for (final String cameraId : manager.getCameraIdList()) {
            final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            final StreamConfigurationMap map =
                    characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // Fallback to camera1 API for internal cameras that don't have full support.
            // This should help with legacy situations where using the camera2 API causes
            // distorted or otherwise broken previews.
            useCamera2API = (facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
                    || isHardwareLevelSupported(characteristics,
                            CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
            Log.i(TAG, "Camera API lv2?: " + useCamera2API);
            return cameraId;
        }
    } catch (CameraAccessException e) {
        Log.e(TAG, "Not allowed to access camera: " + e);
    }
    return null;
}
Example 12
Source File: AndroidCamera2Capabilities.java From Camera2 with Apache License 2.0 | 5 votes |
private void buildWhiteBalances(CameraCharacteristics p) {
    int[] bals = p.get(CONTROL_AWB_AVAILABLE_MODES);
    if (bals != null) {
        for (int bal : bals) {
            WhiteBalance equiv = whiteBalanceFromInt(bal);
            if (equiv != null) {
                mSupportedWhiteBalances.add(equiv);
            }
        }
    }
}
Example 13
Source File: Camera2Helper.java From CameraCompat with MIT License | 5 votes |
private void operateFlash(boolean isOpen) throws CameraAccessException, SecurityException {
    CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(mBackCameraId);
    Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
    boolean flashSupported = available == null ? false : available;
    if (!flashSupported) {
        return;
    }
    mCameraController.onSettingsChanged(mCameraDevice, mCaptureSession, mOutputTargets, isOpen,
            mCamera2Handler);
}
Example 14
Source File: BaseCameraActivity.java From fritz-examples with MIT License | 5 votes |
private boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) {
    int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
    if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
        return requiredLevel == deviceLevel;
    }
    // deviceLevel is not LEGACY, can use numerical sort
    return requiredLevel <= deviceLevel;
}
Example 15
Source File: Camera2Helper.java From DeviceConnect-Android with MIT License | 5 votes |
/**
 * Gets the camera ID that corresponds to the specified facing.
 * <p>
 * Returns null if no camera with the specified facing can be found.
 * </p>
 * @param cameraManager the camera manager
 * @param facing the camera facing
 * @return the camera ID
 * @throws CameraAccessException if the camera operation fails
 */
static String getCameraId(final CameraManager cameraManager, final int facing) throws CameraAccessException {
    for (String cameraId : cameraManager.getCameraIdList()) {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        Integer supportFacing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (supportFacing != null && supportFacing == facing) {
            return cameraId;
        }
    }
    return null;
}
Example 16
Source File: Camera2Enumerator.java From webrtc_android with MIT License | 5 votes |
@Override
public boolean isFrontFacing(String deviceName) {
    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
    return characteristics != null
            && characteristics.get(CameraCharacteristics.LENS_FACING) == CameraMetadata.LENS_FACING_FRONT;
}
Example 17
Source File: VideoCaptureCamera2.java From 365browser with Apache License 2.0 | 5 votes |
static String getName(int id) {
    final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id);
    if (cameraCharacteristics == null) return null;
    final int facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
    return "camera2 " + id + ", facing "
            + ((facing == CameraCharacteristics.LENS_FACING_FRONT) ? "front" : "back");
}
Example 18
Source File: Camera2Enumerator.java From VideoCRE with MIT License | 5 votes |
@Override
public boolean isBackFacing(String deviceName) {
    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
    return characteristics != null
            && characteristics.get(CameraCharacteristics.LENS_FACING) == CameraMetadata.LENS_FACING_BACK;
}
Example 19
Source File: AndroidCamera2Capabilities.java From Camera2 with Apache License 2.0 | 4 votes |
AndroidCamera2Capabilities(CameraCharacteristics p) {
    super(new Stringifier());

    StreamConfigurationMap s = p.get(SCALER_STREAM_CONFIGURATION_MAP);

    for (Range<Integer> fpsRange : p.get(CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)) {
        mSupportedPreviewFpsRange.add(new int[] { fpsRange.getLower(), fpsRange.getUpper() });
    }

    // TODO: We only support TextureView preview rendering
    mSupportedPreviewSizes.addAll(Size.buildListFromAndroidSizes(Arrays.asList(
            s.getOutputSizes(SurfaceTexture.class))));
    for (int format : s.getOutputFormats()) {
        mSupportedPreviewFormats.add(format);
    }

    // TODO: We only support MediaRecorder video capture
    mSupportedVideoSizes.addAll(Size.buildListFromAndroidSizes(Arrays.asList(
            s.getOutputSizes(MediaRecorder.class))));

    // TODO: We only support JPEG image capture
    mSupportedPhotoSizes.addAll(Size.buildListFromAndroidSizes(Arrays.asList(
            s.getOutputSizes(ImageFormat.JPEG))));
    mSupportedPhotoFormats.addAll(mSupportedPreviewFormats);

    buildSceneModes(p);
    buildFlashModes(p);
    buildFocusModes(p);
    buildWhiteBalances(p);
    // TODO: Populate mSupportedFeatures
    // TODO: Populate mPreferredPreviewSizeForVideo

    Range<Integer> ecRange = p.get(CONTROL_AE_COMPENSATION_RANGE);
    mMinExposureCompensation = ecRange.getLower();
    mMaxExposureCompensation = ecRange.getUpper();

    Rational ecStep = p.get(CONTROL_AE_COMPENSATION_STEP);
    mExposureCompensationStep = (float) ecStep.getNumerator() / ecStep.getDenominator();

    mMaxNumOfFacesSupported = p.get(STATISTICS_INFO_MAX_FACE_COUNT);
    mMaxNumOfMeteringArea = p.get(CONTROL_MAX_REGIONS_AE);

    mMaxZoomRatio = p.get(SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    // TODO: Populate mHorizontalViewAngle
    // TODO: Populate mVerticalViewAngle
    // TODO: Populate mZoomRatioList
    // TODO: Populate mMaxZoomIndex

    if (supports(FocusMode.AUTO)) {
        mMaxNumOfFocusAreas = p.get(CONTROL_MAX_REGIONS_AF);
        if (mMaxNumOfFocusAreas > 0) {
            mSupportedFeatures.add(Feature.FOCUS_AREA);
        }
    }
    if (mMaxNumOfMeteringArea > 0) {
        mSupportedFeatures.add(Feature.METERING_AREA);
    }
    if (mMaxZoomRatio > CameraCapabilities.ZOOM_RATIO_UNZOOMED) {
        mSupportedFeatures.add(Feature.ZOOM);
    }
    // TODO: Detect other features
}
Example 20
Source File: Camera2.java From SimpleVideoEditor with Apache License 2.0 | 4 votes |
void initCameraDevices() {
    try {
        String[] ids = mCameraManager.getCameraIdList();
        if (ids.length == 0) {
            // This device has no camera at all.
            mConfig.setCameraSupported(false);
            mCallback.onError(ICameraCallback.ERROR_CODE_NO_CAMERA_DEVICES);
            return;
        }
        for (String id : ids) {
            CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(id);
            Integer internal = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (internal == null) {
                mCallback.onError(ICameraCallback.ERROR_CODE_UNKNOWN);
                continue;
            }
            // Only front and back cameras are supported for now; external cameras are not.
            if (internal == CameraCharacteristics.LENS_FACING_FRONT) {
                mDeviceInfo.addInfo(new Camera2DeviceInfo.Device(id, Constants.CAMERA_FACING_FRONT));
            } else if (internal == CameraCharacteristics.LENS_FACING_BACK) {
                mDeviceInfo.addInfo(new Camera2DeviceInfo.Device(id, Constants.CAMERA_FACING_BACK));
                // For now, only the flash on the back camera is considered.
                if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) {
                    // If the flash is available, it is turned off by default.
                    mConfig.setCurrentFlashlightState(Constants.FLASH_LIGHT_OFF);
                }
            }
        }
        // Prefer the back camera.
        if (mDeviceInfo.getDeviceByFacing(Constants.CAMERA_FACING_BACK) != null) {
            mConfig.setCurrentFacing(Constants.CAMERA_FACING_BACK);
        } else if (mDeviceInfo.getDeviceByFacing(Constants.CAMERA_FACING_FRONT) != null) {
            mConfig.setCurrentFacing(Constants.CAMERA_FACING_FRONT);
        }
        mConfig.setCurrentCameraId(mDeviceInfo.getDeviceByFacing(mConfig.getCurrentFacing()).getCameraId());
    } catch (CameraAccessException e) {
        //e.printStackTrace();
        // Failed to get the camera device list, usually because the permission is missing.
        mCallback.onError(ICameraCallback.ERROR_CODE_NO_PERMISSION);
    }
}