Java Code Examples for android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes()
The following examples show how to use
android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: VIACamera.java From VIA-AI with MIT License | 8 votes |
/**
 * Queries the YUV_420_888 output resolutions supported by the camera at the
 * given index.
 *
 * @param context context used to look up the {@code CameraManager} service
 * @param index   numeric camera index, converted to the string id camera2 expects
 * @return supported output sizes, or {@code null} when the camera cannot be
 *         accessed or exposes no stream configuration map
 */
@TargetApi(21)
public static Size[] querySupportResolution(Context context, int index) {
    CameraManager manager = (CameraManager) context.getSystemService(CAMERA_SERVICE);
    try {
        CameraCharacteristics chars = manager.getCameraCharacteristics(index + "");
        StreamConfigurationMap configurationMap =
                chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // Fix: CameraCharacteristics.get() may return null for this key; the
        // original dereferenced the map unconditionally and could throw
        // NullPointerException instead of returning null like the other
        // failure paths do.
        if (configurationMap == null) {
            return null;
        }
        return configurationMap.getOutputSizes(ImageFormat.YUV_420_888);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
    return null;
}
Example 2
Source File: Camera2Helper.java From CameraCompat with MIT License | 6 votes |
/**
 * Returns the preview sizes this camera supports for SurfaceTexture output,
 * falling back to the currently configured preview size when the camera does
 * not report any.
 *
 * @throws CameraAccessError when the camera characteristics cannot be read
 */
@Override
protected List<PreviewSize> getSupportedSize() {
    try {
        CameraCharacteristics cameraInfo =
                mCameraManager.getCameraCharacteristics(getCurrentCameraId());
        StreamConfigurationMap streamMap =
                cameraInfo.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Size[] candidates =
                (streamMap == null) ? null : streamMap.getOutputSizes(SurfaceTexture.class);
        if (candidates == null || candidates.length == 0) {
            // Nothing reported: fall back to the size we are already using.
            return Collections.singletonList(new PreviewSize(mPreviewWidth, mPreviewHeight));
        }
        List<PreviewSize> supported = new ArrayList<>(candidates.length);
        for (Size candidate : candidates) {
            supported.add(new PreviewSize(candidate.getWidth(), candidate.getHeight()));
        }
        return supported;
    } catch (CameraAccessException e) {
        throw new CameraAccessError();
    }
}
Example 3
Source File: Camera2.java From MediaPickerInstagram with Apache License 2.0 | 6 votes |
/**
 * <p>Collects some information from {@link #mCameraCharacteristics}.</p>
 * <p>This rewrites {@link #mPreviewSizes}, {@link #mPictureSizes}, and optionally,
 * {@link #mAspectRatio}.</p>
 *
 * @throws IllegalStateException when the camera exposes no stream configuration map
 */
private void collectCameraInfo() {
    StreamConfigurationMap configMap =
            mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (configMap == null) {
        throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
    }
    // Refresh the preview-size list from what the preview surface class supports.
    mPreviewSizes.clear();
    for (android.util.Size previewSize : configMap.getOutputSizes(mPreview.getOutputClass())) {
        mPreviewSizes.add(new Size(previewSize.getWidth(), previewSize.getHeight()));
    }
    // Refresh the picture sizes via the overridable hook.
    mPictureSizes.clear();
    collectPictureSizes(mPictureSizes, configMap);
    // Keep the selected aspect ratio valid for the sizes just collected.
    boolean ratioStillSupported = mPreviewSizes.ratios().contains(mAspectRatio);
    if (!ratioStillSupported) {
        mAspectRatio = mPreviewSizes.ratios().iterator().next();
    }
}
Example 4
Source File: Camera2ApiManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0 | 6 votes |
/**
 * Lists the SurfaceTexture output resolutions of the camera facing the given
 * direction. Never returns {@code null}.
 *
 * @param facing which camera (front/back) to query
 * @return supported sizes, or an empty array when the camera or its stream
 *         configuration map is unavailable
 */
public Size[] getCameraResolutions(Facing facing) {
    try {
        CameraCharacteristics characteristics =
                getCharacteristicsForFacing(cameraManager, facing);
        if (characteristics == null) {
            return new Size[0];
        }
        StreamConfigurationMap streamConfigurationMap =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // Fix: check the nullable map explicitly instead of relying on the
        // NullPointerException catch below — catching NPE hides real bugs.
        // The catch clause is kept so callers still see the same behavior for
        // any other unexpected NPE.
        if (streamConfigurationMap == null) {
            return new Size[0];
        }
        Size[] outputSizes = streamConfigurationMap.getOutputSizes(SurfaceTexture.class);
        return outputSizes != null ? outputSizes : new Size[0];
    } catch (CameraAccessException | NullPointerException e) {
        Log.e(TAG, "Error", e);
        return new Size[0];
    }
}
Example 5
Source File: Camera2Controller.java From pixelvisualcorecamera with Apache License 2.0 | 6 votes |
/**
 * Looks up the JPEG capture sizes supported by the given camera.
 *
 * @param cameraId camera2 id to query
 * @return supported JPEG sizes, or {@code null} when the camera has no stream
 *         configuration map or cannot be accessed
 */
public Size[] getSupportedPictureSizes(String cameraId) {
    CameraManager cameraManager =
            (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    Size[] jpegSizes = null;
    try {
        CameraCharacteristics info = cameraManager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap streamMap =
                info.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (streamMap != null) {
            jpegSizes = streamMap.getOutputSizes(ImageFormat.JPEG);
        }
    } catch (CameraAccessException e) {
        // Best-effort query: log and fall through to the null return.
        Log.w(TAG, e);
    }
    return jpegSizes;
}
Example 6
Source File: OneCameraCharacteristicsImpl.java From Camera2 with Apache License 2.0 | 6 votes |
@Override public List<Size> getSupportedPreviewSizes() { StreamConfigurationMap configMap; try { configMap = mCameraCharacteristics.get( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); } catch (Exception ex) { Log.e(TAG, "Unable to obtain preview sizes.", ex); // See b/19623115 where java.lang.AssertionError can be thrown due to HAL error return new ArrayList<>(0); } ArrayList<Size> supportedPictureSizes = new ArrayList<>(); for (android.util.Size androidSize : configMap.getOutputSizes(SurfaceTexture.class)) { supportedPictureSizes.add(new Size(androidSize)); } return supportedPictureSizes; }
Example 7
Source File: OneCameraImpl.java From Camera2 with Apache License 2.0 | 6 votes |
/** * @return The largest supported picture size. */ public Size getDefaultPictureSize() { StreamConfigurationMap configs = mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat); // Find the largest supported size. android.util.Size largestSupportedSize = supportedSizes[0]; long largestSupportedSizePixels = largestSupportedSize.getWidth() * largestSupportedSize.getHeight(); for (int i = 1; i < supportedSizes.length; i++) { long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight(); if (numPixels > largestSupportedSizePixels) { largestSupportedSize = supportedSizes[i]; largestSupportedSizePixels = numPixels; } } return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight()); }
Example 8
Source File: Camera2.java From TikTok with Apache License 2.0 | 5 votes |
/**
 * <p>Collects some information from {@link #mCameraCharacteristics}.</p>
 * <p>This rewrites {@link #mPreviewSizes}, {@link #mPictureSizes}, and optionally,
 * {@link #mAspectRatio}.</p>
 *
 * @throws IllegalStateException when the camera exposes no stream configuration map
 */
private void collectCameraInfo() {
    StreamConfigurationMap map = mCameraCharacteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map == null) {
        throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
    }
    mPreviewSizes.clear();
    for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
        int width = size.getWidth();
        int height = size.getHeight();
        // Skip sizes larger than the preview surface can handle.
        if (width <= MAX_PREVIEW_WIDTH && height <= MAX_PREVIEW_HEIGHT) {
            mPreviewSizes.add(new Size(width, height));
        }
    }
    mPictureSizes.clear();
    collectPictureSizes(mPictureSizes, map);
    // Fix: iterate over a snapshot of the ratios. If ratios() returns a live
    // view of mPreviewSizes (as in the upstream cameraview SizeMap this code
    // derives from), removing from mPreviewSizes while iterating it throws
    // ConcurrentModificationException. A defensive copy is safe either way.
    for (AspectRatio ratio : new java.util.ArrayList<>(mPreviewSizes.ratios())) {
        if (!mPictureSizes.ratios().contains(ratio)) {
            mPreviewSizes.remove(ratio);
        }
    }
    if (!mPreviewSizes.ratios().contains(mAspectRatio)) {
        mAspectRatio = mPreviewSizes.ratios().iterator().next();
    }
}
Example 9
Source File: CameraHandler.java From sample-tensorflow-imageclassifier with Apache License 2.0 | 5 votes |
/** * Helpful debugging method: Dump all supported camera formats to log. You don't need to run * this for normal operation, but it's very helpful when porting this code to different * hardware. */ public static void dumpFormatInfo(Context context) { // Discover the camera instance CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); String camId = getCameraId(context); if (camId == null) { return; } Log.d(TAG, "Using camera id " + camId); try { CameraCharacteristics characteristics = manager.getCameraCharacteristics(camId); StreamConfigurationMap configs = characteristics.get( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); for (int format : configs.getOutputFormats()) { Log.d(TAG, "Getting sizes for format: " + format); for (Size s : configs.getOutputSizes(format)) { Log.d(TAG, "\t" + s.toString()); } } int[] effects = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS); for (int effect : effects) { Log.d(TAG, "Effect available: " + effect); } } catch (CameraAccessException e) { Log.e(TAG, "Camera access exception getting characteristics."); } }
Example 10
Source File: HdrViewfinderActivity.java From android-HdrViewfinder with Apache License 2.0 | 5 votes |
/**
 * Configure the surfaceview and RS processing.
 *
 * Picks a SurfaceHolder output size no wider than 1280 px, preferring sizes
 * whose aspect ratio is within 0.1 of 16:9, then wires the chosen size into
 * the viewfinder processor and the preview view.
 */
private void configureSurfaces() {
    // Find a good size for output - largest 16:9 aspect ratio that's less than 720p
    final int MAX_WIDTH = 1280;
    final float TARGET_ASPECT = 16.f / 9.f;
    final float ASPECT_TOLERANCE = 0.1f;

    StreamConfigurationMap configs =
            mCameraInfo.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (configs == null) {
        throw new RuntimeException("Cannot get available picture/preview sizes.");
    }
    Size[] outputSizes = configs.getOutputSizes(SurfaceHolder.class);

    // Start from the first reported size; candidates replace the current pick
    // when they are the first to hit the target aspect, or when they are wider.
    Size outputSize = outputSizes[0];
    float outputAspect = (float) outputSize.getWidth() / outputSize.getHeight();
    for (Size candidateSize : outputSizes) {
        // Sizes above MAX_WIDTH are never considered.
        if (candidateSize.getWidth() > MAX_WIDTH) continue;
        float candidateAspect = (float) candidateSize.getWidth() / candidateSize.getHeight();
        boolean goodCandidateAspect = Math.abs(candidateAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
        boolean goodOutputAspect = Math.abs(outputAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
        // NOTE(review): the second clause lets a wider candidate win even when
        // its aspect ratio is worse than the current pick's — looks intentional
        // ("largest" dominates once an aspect match exists), but confirm.
        if ((goodCandidateAspect && !goodOutputAspect) ||
                candidateSize.getWidth() > outputSize.getWidth()) {
            outputSize = candidateSize;
            outputAspect = candidateAspect;
        }
    }
    Log.i(TAG, "Resolution chosen: " + outputSize);

    // Configure processing
    mProcessor = new ViewfinderProcessor(mRS, outputSize);
    setupProcessor();

    // Configure the output view - this will fire surfaceChanged
    mPreviewView.setAspectRatio(outputAspect);
    mPreviewView.getHolder().setFixedSize(outputSize.getWidth(), outputSize.getHeight());
}
Example 11
Source File: FastQrReaderViewPlugin.java From fast_qr_reader_view with MIT License | 5 votes |
private void computeBestPreviewAndRecordingSize( StreamConfigurationMap streamConfigurationMap, Size minPreviewSize, Size captureSize) { Size[] sizes = streamConfigurationMap.getOutputSizes(SurfaceTexture.class); float captureSizeRatio = (float) captureSize.getWidth() / captureSize.getHeight(); List<Size> goodEnough = new ArrayList<>(); for (Size s : sizes) { if ((float) s.getWidth() / s.getHeight() == captureSizeRatio && minPreviewSize.getWidth() < s.getWidth() && minPreviewSize.getHeight() < s.getHeight()) { goodEnough.add(s); } } Collections.sort(goodEnough, new CompareSizesByArea()); if (goodEnough.isEmpty()) { previewSize = sizes[0]; videoSize = sizes[0]; } else { previewSize = goodEnough.get(0); // Video capture size should not be greater than 1080 because MediaRecorder cannot handle higher resolutions. videoSize = goodEnough.get(0); for (int i = goodEnough.size() - 1; i >= 0; i--) { if (goodEnough.get(i).getHeight() <= 1080) { videoSize = goodEnough.get(i); break; } } } }
Example 12
Source File: Camera2.java From LockDemo with Apache License 2.0 | 4 votes |
/**
 * Adds every supported JPEG output size reported by {@code map} to the given
 * size collection.
 *
 * @param sizes destination collection (existing callers pass {@link #mPictureSizes})
 * @param map   stream configuration map to read JPEG sizes from
 */
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    // Fix: add to the "sizes" parameter instead of the mPictureSizes field.
    // The parameter was silently ignored, which would break any caller or
    // override passing a different SizeMap; existing callers pass
    // mPictureSizes, so behavior is unchanged for them.
    for (android.util.Size size : map.getOutputSizes(ImageFormat.JPEG)) {
        sizes.add(new Size(size.getWidth(), size.getHeight()));
    }
}
Example 13
Source File: CameraThread.java From CameraRecorder-android with MIT License | 4 votes |
/**
 * Starts the camera preview: finds the first camera whose lens facing matches
 * {@code lensFacing}, records its sensor array size and flash availability,
 * chooses a preview size, and opens the camera on a dedicated handler thread.
 *
 * @param width  requested preview width; pass a negative value to take the
 *               first size the camera reports
 * @param height requested preview height; same convention as {@code width}
 */
@SuppressLint("MissingPermission")
final void startPreview(final int width, final int height) {
    Log.v(TAG, "startPreview:");
    try {
        if (cameraManager == null) return;
        for (String cameraId : cameraManager.getCameraIdList()) {
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
            // Skip cameras that do not report facing/orientation at all.
            if (characteristics.get(CameraCharacteristics.LENS_FACING) == null || characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) == null) {
                continue;
            }
            // NOTE(review): this == relies on unboxing against
            // lensFacing.getFacing(); if getFacing() returns Integer rather
            // than int, this compares references — confirm its return type.
            if (characteristics.get(CameraCharacteristics.LENS_FACING) == lensFacing.getFacing()) {
                sensorArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                // NOTE(review): FLASH_INFO_AVAILABLE may be null; safe only if
                // flashSupport is a Boolean field — verify.
                flashSupport = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                // Negative dimensions mean "no preference": take the first size.
                if (width < 0 || height < 0) {
                    cameraSize = map.getOutputSizes(SurfaceTexture.class)[0];
                } else {
                    cameraSize = getClosestSupportedSize(Arrays.asList(map.getOutputSizes(SurfaceTexture.class)), width, height);
                }
                Log.v(TAG, "cameraSize =" + cameraSize);
                // Open the camera from a background handler thread; callbacks
                // arrive on this thread's looper. Note the thread is created
                // per call and never explicitly quit here.
                HandlerThread thread = new HandlerThread("OpenCamera");
                thread.start();
                Handler backgroundHandler = new Handler(thread.getLooper());
                cameraManager.openCamera(cameraId, cameraDeviceCallback, backgroundHandler);
                return;
            }
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
Example 14
Source File: Camera2.java From SimpleVideoEditor with Apache License 2.0 | 4 votes |
/**
 * Collects the JPEG output sizes from {@code map} into {@link #mPictureSizes}.
 * Note: the {@code sizes} parameter is not read by this implementation.
 */
private void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    android.util.Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
    for (android.util.Size jpegSize : jpegSizes) {
        mPictureSizes.add(new Size(jpegSize.getWidth(), jpegSize.getHeight()));
    }
}
Example 15
Source File: Camera2.java From SimpleVideoEditor with Apache License 2.0 | 4 votes |
/**
 * Reads the current camera's characteristics and refreshes the cached
 * preview/picture sizes, the selected aspect ratio, and the per-device
 * auto-focus / auto-white-balance support flags.
 */
private void collectCameraInfo() {
    try {
        mCameraCharacteristics = mCameraManager.getCameraCharacteristics((String) mConfig.getCurrentCameraId());
        StreamConfigurationMap map = mCameraCharacteristics.
                get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            // TODO: handle error here!!!
            throw new IllegalStateException("Failed to get configuration map: "
                    + mConfig.getCurrentCameraId());
        }
        // Rebuild the preview-size list, keeping only sizes the preview
        // surface can display.
        mPreviewSizes.clear();
        for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
            int width = size.getWidth();
            int height = size.getHeight();
            if (width <= MAX_PREVIEW_WIDTH && height <= MAX_PREVIEW_HEIGHT) {
                mPreviewSizes.add(new Size(width, height));
            }
        }
        mPictureSizes.clear();
        collectPictureSizes(mPictureSizes, map);
//            for (AspectRatio ratio : mPreviewSizes.ratios()) {
//                if (!mPictureSizes.ratios().contains(ratio)) {
//                    mPreviewSizes.remove(ratio);
//                }
//            }
        // Fall back to the first available ratio when the configured one is
        // not supported. NOTE(review): throws NoSuchElementException if the
        // camera reported no usable preview sizes at all — confirm callers
        // guarantee a non-empty list.
        if (!mPreviewSizes.ratios().contains(mAspectRatio)) {
            mAspectRatio = mPreviewSizes.ratios().iterator().next();
        }
        // Check whether the current camera supports auto focus.
        int[] afmodes = mCameraCharacteristics.get(
                CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
        // Auto focus is unsupported when no modes (or only OFF) are reported.
        if (afmodes == null || afmodes.length == 0 ||
                (afmodes.length == 1 && afmodes[0] == CameraCharacteristics.CONTROL_AF_MODE_OFF)) {
            mDeviceInfo.getDeviceById((String) mConfig.getCurrentCameraId()).setAFSupported(false);
        } else {
            mDeviceInfo.getDeviceById((String) mConfig.getCurrentCameraId()).setAFSupported(true);
        }
        // Check whether the current camera supports auto white balance.
        int[] awbmodes = mCameraCharacteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
        if (awbmodes == null || awbmodes.length == 0 ||
                (awbmodes.length == 1 && awbmodes[0] == CameraCharacteristics.CONTROL_AWB_MODE_OFF)) {
            mDeviceInfo.getDeviceById((String) mConfig.getCurrentCameraId()).setAWBSupported(false);
        } else {
            mDeviceInfo.getDeviceById((String) mConfig.getCurrentCameraId()).setAWBSupported(true);
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
        // TODO: handle error here!!!
    }
}
Example 16
Source File: CameraUtils.java From SimpleSmsRemote with MIT License | 4 votes |
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) public static MyCameraInfo CreateFromCameraCharacteristics(String cameraId, CameraCharacteristics characteristics) { StreamConfigurationMap configMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); Size[] outputSizes = configMap.getOutputSizes(ImageFormat.JPEG); List<int[]> outputResolutions = new ArrayList<>(); for (Size outputSize : outputSizes) { outputResolutions.add(new int[]{outputSize.getWidth(), outputSize.getHeight()}); } MyCameraInfo cameraInfo = new MyCameraInfo(cameraId, outputResolutions); // supported functionality depends on the supported hardware level switch (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) { case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3: case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL: case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: cameraInfo.setAutofocusSupport(true); case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: // only supports camera 1 api features break; } int[] ints = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES); if (characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) cameraInfo.setFlashlightSupport(true); Integer lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING); if (lensFacing != null) { if (lensFacing == CameraCharacteristics.LENS_FACING_BACK) cameraInfo.setLensFacing(LensFacing.BACK); else if (lensFacing == CameraCharacteristics.LENS_FACING_FRONT) cameraInfo.setLensFacing(LensFacing.FRONT); else if (lensFacing == CameraCharacteristics.LENS_FACING_EXTERNAL) cameraInfo.setLensFacing(LensFacing.EXTERNAL); } /* jpeg is always supported boolean isSupported = configMap.isOutputSupportedFor(0x100); */ //TODO add more info return cameraInfo; }
Example 17
Source File: VideoCaptureCamera2.java From 365browser with Apache License 2.0 | 4 votes |
static VideoCaptureFormat[] getDeviceSupportedFormats(int id) { final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(id); if (cameraCharacteristics == null) return null; final int[] capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); // Per-format frame rate via getOutputMinFrameDuration() is only available if the // property REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR is set. boolean minFrameDurationAvailable = false; for (int cap : capabilities) { if (cap == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) { minFrameDurationAvailable = true; break; } } ArrayList<VideoCaptureFormat> formatList = new ArrayList<VideoCaptureFormat>(); final StreamConfigurationMap streamMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); final int[] formats = streamMap.getOutputFormats(); for (int format : formats) { final Size[] sizes = streamMap.getOutputSizes(format); if (sizes == null) continue; for (Size size : sizes) { double minFrameRate = 0.0f; if (minFrameDurationAvailable) { final long minFrameDuration = streamMap.getOutputMinFrameDuration(format, size); minFrameRate = (minFrameDuration == 0) ? 0.0f : (1.0 / kNanoSecondsToFps * minFrameDuration); } else { // TODO(mcasas): find out where to get the info from in this case. // Hint: perhaps using SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS. minFrameRate = 0.0; } formatList.add(new VideoCaptureFormat( size.getWidth(), size.getHeight(), (int) minFrameRate, 0)); } } return formatList.toArray(new VideoCaptureFormat[formatList.size()]); }
Example 18
Source File: Camera2.java From TikTok with Apache License 2.0 | 4 votes |
/**
 * Records every JPEG output size the camera reports into
 * {@link #mPictureSizes}. The {@code sizes} parameter is unused by this
 * implementation (kept for the overridable-hook signature).
 */
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    for (android.util.Size jpeg : map.getOutputSizes(ImageFormat.JPEG)) {
        int w = jpeg.getWidth();
        int h = jpeg.getHeight();
        mPictureSizes.add(new Size(w, h));
    }
}
Example 19
Source File: Camera2.java From MediaPickerInstagram with Apache License 2.0 | 4 votes |
/**
 * Copies the camera's JPEG output sizes into {@link #mPictureSizes}.
 * The {@code sizes} parameter is unused by this implementation.
 */
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
    android.util.Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
    for (int i = 0; i < jpegSizes.length; i++) {
        mPictureSizes.add(new Size(jpegSizes[i].getWidth(), jpegSizes[i].getHeight()));
    }
}
Example 20
Source File: CameraThread.java From GPUVideo-android with MIT License | 4 votes |
/**
 * Starts the camera preview: locates the first camera whose lens facing
 * matches {@code lensFacing}, caches its sensor array size and flash
 * availability, selects a preview size, and opens the camera on a dedicated
 * handler thread.
 *
 * @param width  requested preview width; negative means "take the first size
 *               the camera reports"
 * @param height requested preview height; same convention as {@code width}
 */
@SuppressLint("MissingPermission")
final void startPreview(final int width, final int height) {
    Log.v(TAG, "startPreview:");
    try {
        if (cameraManager == null) return;
        for (String cameraId : cameraManager.getCameraIdList()) {
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
            // Skip cameras that do not report lens facing or sensor orientation.
            if (characteristics.get(CameraCharacteristics.LENS_FACING) == null || characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) == null) {
                continue;
            }
            // NOTE(review): == relies on unboxing against
            // lensFacing.getFacing(); if getFacing() returns Integer this is a
            // reference comparison — confirm the return type.
            if (characteristics.get(CameraCharacteristics.LENS_FACING) == lensFacing.getFacing()) {
                sensorArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                // NOTE(review): FLASH_INFO_AVAILABLE may be null; safe only if
                // flashSupport is a Boolean field — verify.
                flashSupport = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                // Negative dimensions: no preference, take the first reported size.
                if (width < 0 || height < 0) {
                    cameraSize = map.getOutputSizes(SurfaceTexture.class)[0];
                } else {
                    cameraSize = getClosestSupportedSize(Arrays.asList(map.getOutputSizes(SurfaceTexture.class)), width, height);
                }
                Log.v(TAG, "cameraSize =" + cameraSize);
                // Camera callbacks are delivered on this dedicated thread's
                // looper. Note the thread is created per call and never
                // explicitly quit here.
                HandlerThread thread = new HandlerThread("OpenCamera");
                thread.start();
                Handler backgroundHandler = new Handler(thread.getLooper());
                cameraManager.openCamera(cameraId, cameraDeviceCallback, backgroundHandler);
                return;
            }
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}