android.util.Range Java Examples
The following examples show how to use
android.util.Range.
You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: CustomVideoCapturerCamera2.java From opentok-android-sdk-samples with MIT License | 6 votes |
/**
 * Selects the AE target FPS range of camera {@code camId} that best matches the
 * desired frame rate, or {@code null} when {@code camId} is not among the
 * available camera ids.
 *
 * @param camId the camera id to look up
 * @param fps the desired capture frame rate
 * @throws CameraAccessException if the camera service cannot be queried
 */
private Range<Integer> selectCameraFpsRange(String camId, final int fps) throws CameraAccessException {
    for (String id : cameraManager.getCameraIdList()) {
        if (!id.equals(camId)) {
            continue;
        }
        CameraCharacteristics info = cameraManager.getCameraCharacteristics(id);
        List<Range<Integer>> fpsLst = new ArrayList<>();
        Collections.addAll(fpsLst,
                info.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES));
        /* Pick the range with the smallest "error" from the desired fps.
         *
         * Android seems to do a better job at color correction/avoid 'dark frames' issue by
         * selecting camera settings with the smallest lower bound on allowed frame rate
         * range, hence the lower bound is part of the error term.
         */
        return Collections.min(fpsLst, new Comparator<Range<Integer>>() {
            @Override
            public int compare(Range<Integer> lhs, Range<Integer> rhs) {
                // Integer.compare is overflow-safe, unlike the subtraction idiom
                // (calcError(lhs) - calcError(rhs)) used previously.
                return Integer.compare(calcError(lhs), calcError(rhs));
            }

            private int calcError(Range<Integer> val) {
                return val.getLower() + Math.abs(val.getUpper() - fps);
            }
        });
    }
    return null; // camId not found
}
Example #2
Source File: NetworkPolicyManager.java From android_9.0.0_r45 with Apache License 2.0 | 6 votes |
/** {@hide} */
@Deprecated
public static Iterator<Pair<ZonedDateTime, ZonedDateTime>> cycleIterator(NetworkPolicy policy) {
    // Adapts the Range<ZonedDateTime>-based cycle iterator to the legacy
    // Pair<ZonedDateTime, ZonedDateTime> API.
    final Iterator<Range<ZonedDateTime>> it = policy.cycleIterator();
    return new Iterator<Pair<ZonedDateTime, ZonedDateTime>>() {
        @Override
        public boolean hasNext() {
            return it.hasNext();
        }

        @Override
        public Pair<ZonedDateTime, ZonedDateTime> next() {
            if (hasNext()) {
                // Unpack the range endpoints into a (lower, upper) pair.
                final Range<ZonedDateTime> r = it.next();
                return Pair.create(r.getLower(), r.getUpper());
            } else {
                // NOTE(review): returns a (null, null) pair instead of throwing
                // NoSuchElementException, which deviates from the Iterator
                // contract; kept as-is since legacy callers may depend on it.
                return Pair.create(null, null);
            }
        }
    };
}
Example #3
Source File: StreamConfigurationMap.java From android_9.0.0_r45 with Apache License 2.0 | 6 votes |
private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) { sb.append("HighSpeedVideoConfigurations("); Size[] sizes = getHighSpeedVideoSizes(); for (Size size : sizes) { Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size); for (Range<Integer> range : ranges) { sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(), size.getHeight(), range.getLower(), range.getUpper())); } } // Remove the pending ", " if (sb.charAt(sb.length() - 1) == ' ') { sb.delete(sb.length() - 2, sb.length()); } sb.append(")"); }
Example #4
Source File: StreamConfigurationMap.java From android_9.0.0_r45 with Apache License 2.0 | 6 votes |
/**
 * Get the supported video sizes for an input high speed FPS range.
 *
 * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
 *
 * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
 * @return An array of video sizes to create high speed capture sessions for high speed streaming
 *         use cases.
 *
 * @throws IllegalArgumentException if input FPS range does not exist in the return value of
 *         getHighSpeedVideoFpsRanges
 * @see #getHighSpeedVideoFpsRanges()
 */
public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
    Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
    if (sizeCount == null || sizeCount == 0) {
        throw new IllegalArgumentException(String.format(
                "FpsRange %s does not support high speed video recording", fpsRange));
    }
    // Collect every configured size whose fps range matches the requested one.
    Size[] sizes = new Size[sizeCount];
    int next = 0;
    for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
        if (!fpsRange.equals(config.getFpsRange())) {
            continue;
        }
        sizes[next++] = config.getSize();
    }
    return sizes;
}
Example #5
Source File: Camera2Session.java From VideoCRE with MIT License | 6 votes |
// Chooses the capture format (size + fps range) closest to the requested
// width/height/framerate and stores it in captureFormat. Reports an error and
// leaves captureFormat untouched when the device advertises no usable formats.
private void findCaptureFormat() {
    checkIsOnCameraThread();
    Range<Integer>[] fpsRanges = cameraCharacteristics.get(
            CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    // Normalizes fps values; some HALs report them multiplied by 1000
    // (see Camera2Enumerator.getFpsUnitFactor).
    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
    List<CaptureFormat.FramerateRange> framerateRanges =
            Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
    Logging.d(TAG, "Available preview sizes: " + sizes);
    Logging.d(TAG, "Available fps ranges: " + framerateRanges);
    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
        reportError("No supported capture formats.");
        return;
    }
    final CaptureFormat.FramerateRange bestFpsRange =
            CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
    final Size bestSize =
            CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
    //CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
    Logging.d(TAG, "Using capture format: " + captureFormat);
}
Example #6
Source File: HighSpeedVideoConfiguration.java From android_9.0.0_r45 with Apache License 2.0 | 6 votes |
/**
 * Create a new {@link HighSpeedVideoConfiguration}.
 *
 * @param width image width, in pixels (positive)
 * @param height image height, in pixels (positive)
 * @param fpsMin minimum frames per second for the configuration (positive)
 * @param fpsMax maximum frames per second for the configuration (larger or equal to 60)
 * @param batchSizeMax maximum request batch size for the configuration (positive)
 *
 * @throws IllegalArgumentException
 *          if width/height/fpsMin were not positive or fpsMax less than 60
 *
 * @hide
 */
public HighSpeedVideoConfiguration(
        final int width, final int height, final int fpsMin, final int fpsMax,
        final int batchSizeMax) {
    // fpsMax is validated first; HIGH_SPEED_MAX_MINIMAL_FPS is 60 per the
    // javadoc above (constant declared elsewhere in the class — confirm there).
    if (fpsMax < HIGH_SPEED_MAX_MINIMAL_FPS) {
        throw new IllegalArgumentException("fpsMax must be at least " +
                HIGH_SPEED_MAX_MINIMAL_FPS);
    }
    mFpsMax = fpsMax;
    // checkArgumentPositive validates and returns its argument, so each field
    // is assigned only after passing validation.
    mWidth = checkArgumentPositive(width, "width must be positive");
    mHeight = checkArgumentPositive(height, "height must be positive");
    mFpsMin = checkArgumentPositive(fpsMin, "fpsMin must be positive");
    mSize = new Size(mWidth, mHeight);
    mBatchSizeMax = checkArgumentPositive(batchSizeMax, "batchSizeMax must be positive");
    mFpsRange = new Range<Integer>(mFpsMin, mFpsMax);
}
Example #7
Source File: DataManager.java From okuki with Apache License 2.0 | 6 votes |
// Loads the next page of results. The `loading` flag guards against issuing
// more than one request at a time; it is cleared on completion or error.
public void loadMore() {
    if (!loading.get()) {
        setLoading(true);
        // Page starts at the current result count, i.e. results are appended.
        loadData(pageSize, results.size())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .doOnError(error -> setLoading(false))
                .subscribe(
                        list -> {
                            if (!list.isEmpty()) {
                                int start = results.size();
                                int end = start + list.size();
                                results.addAll(list);
                                if (start > 0) {
                                    // Appended to an existing list: notify only
                                    // the inserted [start, end) range.
                                    rangeInserted.call(new Range<>(start, end));
                                } else {
                                    // First page: signal a full list refresh.
                                    listUpdated.call(null);
                                }
                            }
                            setLoading(false);
                        },
                        Errors.log());
    }
}
Example #8
Source File: Camera2Session.java From webrtc_android with MIT License | 6 votes |
// Chooses the capture format (size + fps range) closest to the requested
// width/height/framerate and stores it in captureFormat. Reports an error and
// returns early when the device advertises no usable formats.
private void findCaptureFormat() {
    checkIsOnCameraThread();
    Range<Integer>[] fpsRanges = cameraCharacteristics.get(
            CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    // Normalizes fps values; some HALs report them multiplied by 1000
    // (see Camera2Enumerator.getFpsUnitFactor).
    fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
    List<CaptureFormat.FramerateRange> framerateRanges =
            Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
    List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
    Logging.d(TAG, "Available preview sizes: " + sizes);
    Logging.d(TAG, "Available fps ranges: " + framerateRanges);
    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
        reportError("No supported capture formats.");
        return;
    }
    final CaptureFormat.FramerateRange bestFpsRange =
            CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
    final Size bestSize =
            CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
    // Records the chosen resolution for metrics.
    CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
    captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
    Logging.d(TAG, "Using capture format: " + captureFormat);
}
Example #9
Source File: Camera2Enumerator.java From VideoCRE with MIT License | 5 votes |
/**
 * Converts an array of camera fps ranges into {@link CaptureFormat.FramerateRange}s,
 * scaling each bound by {@code unitFactor}.
 */
static List<CaptureFormat.FramerateRange> convertFramerates(
        Range<Integer>[] arrayRanges, int unitFactor) {
    final List<CaptureFormat.FramerateRange> result = new ArrayList<>();
    for (Range<Integer> fpsRange : arrayRanges) {
        final int min = fpsRange.getLower() * unitFactor;
        final int max = fpsRange.getUpper() * unitFactor;
        result.add(new CaptureFormat.FramerateRange(min, max));
    }
    return result;
}
Example #10
Source File: UiAutomationElement.java From appium-uiautomator2-server with Apache License 2.0 | 5 votes |
/**
 * A snapshot of all attributes is taken at construction. The attributes of a
 * {@code UiAutomationElement} instance are immutable. If the underlying
 * {@link AccessibilityNodeInfo} is updated, a new {@code UiAutomationElement}
 * instance will be created in
 */
private UiAutomationElement(AccessibilityNodeInfo node, int index) {
    super(checkNotNull(node));
    Map<Attribute, Object> attributes = new LinkedHashMap<>();
    // The same sequence will be used for node attributes in xml page source
    put(attributes, Attribute.INDEX, index);
    put(attributes, Attribute.PACKAGE, charSequenceToNullableString(node.getPackageName()));
    put(attributes, Attribute.CLASS, charSequenceToNullableString(node.getClassName()));
    put(attributes, Attribute.TEXT, AccessibilityNodeInfoHelpers.getText(node, true));
    put(attributes, Attribute.ORIGINAL_TEXT, AccessibilityNodeInfoHelpers.getText(node, false));
    put(attributes, Attribute.CONTENT_DESC, charSequenceToNullableString(node.getContentDescription()));
    put(attributes, Attribute.RESOURCE_ID, node.getViewIdResourceName());
    put(attributes, Attribute.CHECKABLE, node.isCheckable());
    put(attributes, Attribute.CHECKED, node.isChecked());
    put(attributes, Attribute.CLICKABLE, node.isClickable());
    put(attributes, Attribute.ENABLED, node.isEnabled());
    put(attributes, Attribute.FOCUSABLE, node.isFocusable());
    put(attributes, Attribute.FOCUSED, node.isFocused());
    put(attributes, Attribute.LONG_CLICKABLE, node.isLongClickable());
    put(attributes, Attribute.PASSWORD, node.isPassword());
    put(attributes, Attribute.SCROLLABLE, node.isScrollable());
    // Selection bounds are recorded only when a non-empty selection exists
    // (getSelectionRange returns null otherwise).
    Range<Integer> selectionRange = AccessibilityNodeInfoHelpers.getSelectionRange(node);
    if (selectionRange != null) {
        attributes.put(Attribute.SELECTION_START, selectionRange.getLower());
        attributes.put(Attribute.SELECTION_END, selectionRange.getUpper());
    }
    put(attributes, Attribute.SELECTED, node.isSelected());
    put(attributes, Attribute.BOUNDS, AccessibilityNodeInfoHelpers.getVisibleBounds(node).toShortString());
    put(attributes, Attribute.DISPLAYED, node.isVisibleToUser());
    // Skip CONTENT_SIZE as it is quite expensive to compute it for each element
    this.attributes = Collections.unmodifiableMap(attributes);
    this.children = buildChildren(node);
}
Example #11
Source File: Api21Builder.java From jellyfin-androidtv with GNU General Public License v2.0 | 5 votes |
private void addAudioCapabilities(MediaCodecInfo.CodecCapabilities codecCapabilities, CodecProfile profile) { MediaCodecInfo.AudioCapabilities audioCaps = codecCapabilities.getAudioCapabilities(); ArrayList<ProfileCondition> conditions = new ArrayList<>(); // Audio channels int maxAudioChannels = audioCaps.getMaxInputChannelCount(); if (maxAudioChannels == 5) { maxAudioChannels = 6; } String channels = String.valueOf(maxAudioChannels); conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.AudioChannels, channels)); // Audio sample rate // TODO: Add this later. There currently is no profile condition support for it // Audio max bitrate Range<Integer> bitrateRange = audioCaps.getBitrateRange(); String maxBitrate = String.valueOf(bitrateRange.getUpper()); conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual, ProfileConditionValue.AudioBitrate, maxBitrate)); // Audio min bitrate String minBitrate = String.valueOf(bitrateRange.getLower()); conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual, ProfileConditionValue.AudioBitrate, minBitrate)); profile.setConditions(conditions.toArray(new ProfileCondition[conditions.size()])); }
Example #12
Source File: AccessibilityNodeInfoHelpers.java From appium-uiautomator2-server with Apache License 2.0 | 5 votes |
/**
 * Returns the text selection range of {@code nodeInfo} as a
 * {@code Range<Integer>}, or {@code null} when the node is null or there is
 * no non-empty selection (start < 0 or start == end).
 */
@Nullable
public static Range<Integer> getSelectionRange(@Nullable AccessibilityNodeInfo nodeInfo) {
    if (nodeInfo == null) {
        return null;
    }
    final int start = nodeInfo.getTextSelectionStart();
    final int end = nodeInfo.getTextSelectionEnd();
    return (start >= 0 && start != end) ? new Range<>(start, end) : null;
}
Example #13
Source File: Api21Builder.java From jellyfin-androidtv with GNU General Public License v2.0 | 5 votes |
/**
 * Derives video {@link ProfileCondition}s (anamorphic flag, bit depth for H.264,
 * bitrate/height/width bounds) from the codec's reported video capabilities,
 * installs them on {@code profile}, then adds the profile levels.
 */
private void addVideoCapabilities(MediaCodecInfo.CodecCapabilities codecCapabilities, CodecProfile profile) {
    MediaCodecInfo.VideoCapabilities videoCaps = codecCapabilities.getVideoCapabilities();
    ArrayList<ProfileCondition> conditions = new ArrayList<>();

    conditions.add(new ProfileCondition(ProfileConditionType.NotEquals,
            ProfileConditionValue.IsAnamorphic, "true"));

    if (profile.getCodec() != null && profile.getCodec().toLowerCase().contains(CodecTypes.H264)) {
        conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual,
                ProfileConditionValue.VideoBitDepth, "8"));
    }

    // Video bitrate bounds.
    Range<Integer> bitrateRange = videoCaps.getBitrateRange();
    String maxBitrate = String.valueOf(bitrateRange.getUpper());
    conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual,
            ProfileConditionValue.VideoBitrate, maxBitrate));
    String minBitrate = String.valueOf(bitrateRange.getLower());
    conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual,
            ProfileConditionValue.VideoBitrate, minBitrate));

    // Video height bounds. The max-height condition was commented out in the
    // original implementation and remains intentionally disabled here.
    Range<Integer> heightRange = videoCaps.getSupportedHeights();
    String minHeight = String.valueOf(heightRange.getLower());
    conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual,
            ProfileConditionValue.Height, minHeight));

    // Video width bounds.
    // BUG FIX: the original code queried getSupportedHeights() here, so the
    // Width conditions were populated with the height range.
    Range<Integer> widthRange = videoCaps.getSupportedWidths();
    conditions.add(new ProfileCondition(ProfileConditionType.LessThanEqual,
            ProfileConditionValue.Width, String.valueOf(widthRange.getUpper())));
    conditions.add(new ProfileCondition(ProfileConditionType.GreaterThanEqual,
            ProfileConditionValue.Width, String.valueOf(widthRange.getLower())));

    profile.setConditions(conditions.toArray(new ProfileCondition[conditions.size()]));

    AddProfileLevels(codecCapabilities, profile);
}
Example #14
Source File: AndroidCamera2Settings.java From Camera2 with Apache License 2.0 | 5 votes |
public Camera2RequestSettingsSet getRequestSettings() { updateRequestSettingOrForceToDefault(CONTROL_AE_REGIONS, legacyAreasToMeteringRectangles(mMeteringAreas)); updateRequestSettingOrForceToDefault(CONTROL_AF_REGIONS, legacyAreasToMeteringRectangles(mFocusAreas)); updateRequestSettingOrForceToDefault(CONTROL_AE_TARGET_FPS_RANGE, new Range(mPreviewFpsRangeMin, mPreviewFpsRangeMax)); // TODO: mCurrentPreviewFormat updateRequestSettingOrForceToDefault(JPEG_QUALITY, mJpegCompressQuality); // TODO: mCurrentPhotoFormat mRequestSettings.set(SCALER_CROP_REGION, mCropRectangle); // TODO: mCurrentZoomIndex updateRequestSettingOrForceToDefault(CONTROL_AE_EXPOSURE_COMPENSATION, mExposureCompensationIndex); updateRequestFlashMode(); updateRequestFocusMode(); updateRequestSceneMode(); updateRequestWhiteBalance(); updateRequestSettingOrForceToDefault(CONTROL_VIDEO_STABILIZATION_MODE, mVideoStabilizationEnabled ? CONTROL_VIDEO_STABILIZATION_MODE_ON : CONTROL_VIDEO_STABILIZATION_MODE_OFF); // OIS shouldn't be on if software video stabilization is. mRequestSettings.set(LENS_OPTICAL_STABILIZATION_MODE, mVideoStabilizationEnabled ? LENS_OPTICAL_STABILIZATION_MODE_OFF : null); updateRequestSettingOrForceToDefault(CONTROL_AE_LOCK, mAutoExposureLocked); updateRequestSettingOrForceToDefault(CONTROL_AWB_LOCK, mAutoWhiteBalanceLocked); // TODO: mRecordingHintEnabled updateRequestGpsData(); if (mExifThumbnailSize != null) { updateRequestSettingOrForceToDefault(JPEG_THUMBNAIL_SIZE, new android.util.Size( mExifThumbnailSize.width(), mExifThumbnailSize.height())); } else { updateRequestSettingOrForceToDefault(JPEG_THUMBNAIL_SIZE, null); } return mRequestSettings; }
Example #15
Source File: OneCameraCharacteristicsImpl.java From Camera2 with Apache License 2.0 | 5 votes |
/**
 * Returns the upper bound of the AE compensation range, or -1 when exposure
 * compensation is not supported on this device.
 */
@Override
public int getMaxExposureCompensation() {
    if (!isExposureCompensationSupported()) {
        return -1;
    }
    final Range<Integer> range =
            mCameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    return range.getUpper();
}
Example #16
Source File: OneCameraCharacteristicsImpl.java From Camera2 with Apache License 2.0 | 5 votes |
/**
 * Returns the lower bound of the AE compensation range, or -1 when exposure
 * compensation is not supported on this device.
 */
@Override
public int getMinExposureCompensation() {
    if (!isExposureCompensationSupported()) {
        return -1;
    }
    final Range<Integer> range =
            mCameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    return range.getLower();
}
Example #17
Source File: OneCameraCharacteristicsImpl.java From Camera2 with Apache License 2.0 | 5 votes |
@Override public boolean isExposureCompensationSupported() { // Turn off exposure compensation for Nexus 6 on L (API level 21) // because the bug in framework b/19219128. if (ApiHelper.IS_NEXUS_6 && ApiHelper.isLollipop()) { return false; } Range<Integer> compensationRange = mCameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE); return compensationRange.getLower() != 0 || compensationRange.getUpper() != 0; }
Example #18
Source File: BurstCaptureCommand.java From Camera2 with Apache License 2.0 | 5 votes |
/** * On Nexus 5 limit frame rate to 24 fps. See b/18950682. */ private static void checkAndApplyNexus5FrameRateWorkaround(RequestBuilder request) { if (ApiHelper.IS_NEXUS_5) { // For burst limit the frame rate to 24 fps. Range<Integer> frameRateBackOff = new Range<>(7, 24); request.setParam(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, frameRateBackOff); } }
Example #19
Source File: Camera2Enumerator.java From webrtc_android with MIT License | 5 votes |
/**
 * Maps each camera fps range to a {@link CaptureFormat.FramerateRange}, with
 * both bounds scaled by {@code unitFactor}.
 */
static List<CaptureFormat.FramerateRange> convertFramerates(
        Range<Integer>[] arrayRanges, int unitFactor) {
    final List<CaptureFormat.FramerateRange> converted =
            new ArrayList<CaptureFormat.FramerateRange>();
    for (Range<Integer> r : arrayRanges) {
        converted.add(new CaptureFormat.FramerateRange(
                r.getLower() * unitFactor,
                r.getUpper() * unitFactor));
    }
    return converted;
}
Example #20
Source File: Camera2Proxy.java From mobile-ar-sensor-logger with GNU General Public License v3.0 | 5 votes |
// Locks exposure time and ISO on the preview request (AE off). The target ISO
// is scaled so that ISO * exposure stays roughly constant relative to a
// 30 @ 30ms baseline (30L * 30000000 ns).
private void setExposureAndIso() {
    Long exposureNanos = CameraCaptureActivity.mDesiredExposureTime;
    // Baseline product: ISO 30 at 30ms, scaled inversely with exposure time.
    Long desiredIsoL = 30L * 30000000L / exposureNanos;
    Integer desiredIso = desiredIsoL.intValue();
    if (!expoStats.isEmpty()) {
        // Use the middle recorded (exposure, ISO) sample as a reference point.
        int index = expoStats.size() / 2;
        Long actualExpo = expoStats.get(index).mExposureNanos;
        Integer actualIso = expoStats.get(index).mIso;
        if (actualExpo <= exposureNanos) {
            // Camera already runs at (or below) the desired exposure: keep its values.
            exposureNanos = actualExpo;
            desiredIso = actualIso;
        } else {
            // Shorter exposure than observed: compensate with proportionally higher ISO.
            desiredIsoL = actualIso * actualExpo / exposureNanos;
            desiredIso = desiredIsoL.intValue();
        }
    }
    // fix exposure
    mPreviewRequestBuilder.set(
            CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
    Range<Long> exposureTimeRange = mCameraCharacteristics.get(
            CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
    if (exposureTimeRange != null) {
        Log.d(TAG, "exposure time range " + exposureTimeRange.toString());
    }
    // NOTE(review): the supported range is only logged; exposureNanos is not
    // clamped to it before being applied — confirm whether clamping is needed.
    mPreviewRequestBuilder.set(
            CaptureRequest.SENSOR_EXPOSURE_TIME, exposureNanos);
    Log.d(TAG, "Exposure time set to " + exposureNanos);
    // fix ISO
    Range<Integer> isoRange = mCameraCharacteristics.get(
            CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
    if (isoRange != null) {
        Log.d(TAG, "ISO range " + isoRange.toString());
    }
    mPreviewRequestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, desiredIso);
    Log.d(TAG, "ISO set to " + desiredIso);
}
Example #21
Source File: MarshalQueryableRange.java From android_9.0.0_r45 with Apache License 2.0 | 5 votes |
/**
 * Computes the marshalled byte size of a {@code Range}: the fixed native size
 * when known, otherwise the sum of the dynamically-sized lower and upper
 * endpoint marshal sizes.
 */
@Override
public int calculateMarshalSize(Range<T> value) {
    final int nativeSize = getNativeSize();
    if (nativeSize != NATIVE_SIZE_DYNAMIC) {
        return nativeSize;
    }
    // Dynamic layout: endpoints are marshalled back-to-back.
    return mNestedTypeMarshaler.calculateMarshalSize(value.getLower())
            + mNestedTypeMarshaler.calculateMarshalSize(value.getUpper());
}
Example #22
Source File: MultipathPolicyTracker.java From android_9.0.0_r45 with Apache License 2.0 | 5 votes |
private long getRemainingDailyBudget(long limitBytes, Range<ZonedDateTime> cycle) { final long start = cycle.getLower().toInstant().toEpochMilli(); final long end = cycle.getUpper().toInstant().toEpochMilli(); final long totalBytes = getNetworkTotalBytes(start, end); final long remainingBytes = totalBytes == -1 ? 0 : Math.max(0, limitBytes - totalBytes); // 1 + ((end - now - 1) / millisInDay with integers is equivalent to: // ceil((double)(end - now) / millisInDay) final long remainingDays = 1 + ((end - mClock.millis() - 1) / TimeUnit.DAYS.toMillis(1)); return remainingBytes / Math.max(1, remainingDays); }
Example #23
Source File: LegacyMetadataMapper.java From android_9.0.0_r45 with Apache License 2.0 | 4 votes |
/**
 * Converts a camera2 AE fps range into the legacy two-element int array format,
 * placing the bounds at PREVIEW_FPS_MIN_INDEX / PREVIEW_FPS_MAX_INDEX.
 */
private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
    final int[] legacyFps = new int[2];
    legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper();
    legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower();
    return legacyFps;
}
Example #24
Source File: VideoCaptureCamera2.java From 365browser with Apache License 2.0 | 4 votes |
// Configures capture for the requested (width x height @ frameRate): picks the
// closest supported YUV_420_888 size and AE fps range, and fills in
// mCaptureFormat / mAeFpsRange / orientation fields. Returns false when the
// camera is busy opening/configuring or no suitable configuration exists.
@Override
public boolean allocate(int width, int height, int frameRate) {
    Log.d(TAG, "allocate: requested (%d x %d) @%dfps", width, height, frameRate);
    ThreadUtils.assertOnUiThread();
    synchronized (mCameraStateLock) {
        if (mCameraState == CameraState.OPENING || mCameraState == CameraState.CONFIGURING) {
            Log.e(TAG, "allocate() invoked while Camera is busy opening/configuring.");
            return false;
        }
    }
    final CameraCharacteristics cameraCharacteristics = getCameraCharacteristics(mId);
    final StreamConfigurationMap streamMap =
            cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    // Find closest supported size.
    final Size[] supportedSizes = streamMap.getOutputSizes(ImageFormat.YUV_420_888);
    final Size closestSupportedSize = findClosestSizeInArray(supportedSizes, width, height);
    if (closestSupportedSize == null) {
        Log.e(TAG, "No supported resolutions.");
        return false;
    }
    final List<Range<Integer>> fpsRanges = Arrays.asList(cameraCharacteristics.get(
            CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES));
    if (fpsRanges.isEmpty()) {
        Log.e(TAG, "No supported framerate ranges.");
        return false;
    }
    final List<FramerateRange> framerateRanges =
            new ArrayList<FramerateRange>(fpsRanges.size());
    // On some legacy implementations FPS values are multiplied by 1000. Multiply by 1000
    // everywhere for consistency. Set fpsUnitFactor to 1 if fps ranges are already multiplied
    // by 1000.
    final int fpsUnitFactor = fpsRanges.get(0).getUpper() > 1000 ? 1 : 1000;
    for (Range<Integer> range : fpsRanges) {
        framerateRanges.add(new FramerateRange(
                range.getLower() * fpsUnitFactor, range.getUpper() * fpsUnitFactor));
    }
    // Match in the x1000 domain, then convert back to the device's native units.
    final FramerateRange aeFramerateRange =
            getClosestFramerateRange(framerateRanges, frameRate * 1000);
    mAeFpsRange = new Range<Integer>(
            aeFramerateRange.min / fpsUnitFactor, aeFramerateRange.max / fpsUnitFactor);
    Log.d(TAG, "allocate: matched (%d x %d) @[%d - %d]", closestSupportedSize.getWidth(),
            closestSupportedSize.getHeight(), mAeFpsRange.getLower(), mAeFpsRange.getUpper());
    // |mCaptureFormat| is also used to configure the ImageReader.
    mCaptureFormat = new VideoCaptureFormat(closestSupportedSize.getWidth(),
            closestSupportedSize.getHeight(), frameRate, ImageFormat.YUV_420_888);
    mCameraNativeOrientation =
            cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    // TODO(mcasas): The following line is correct for N5 with prerelease Build,
    // but NOT for N7 with a dev Build. Figure out which one to support.
    mInvertDeviceOrientationReadings =
            cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
                    == CameraCharacteristics.LENS_FACING_BACK;
    return true;
}
Example #25
Source File: Camera2Session.java From webrtc_android with MIT License | 4 votes |
// Called when the capture session is ready: builds and submits the repeating
// capture request (fps range, AE mode, stabilization, focus), then starts
// listening for texture frames and reports session start to the callback.
@Override
public void onConfigured(CameraCaptureSession session) {
    checkIsOnCameraThread();
    Logging.d(TAG, "Camera capture session configured.");
    captureSession = session;
    try {
        /*
         * The viable options for video capture requests are:
         * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
         * post-processing.
         * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
         * quality.
         */
        final CaptureRequest.Builder captureRequestBuilder =
                cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        // Set auto exposure fps range.
        // captureFormat.framerate is in the x1000 domain; divide by fpsUnitFactor
        // to get back to the device's native fps units.
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
                        captureFormat.framerate.max / fpsUnitFactor));
        captureRequestBuilder.set(
                CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
        chooseStabilizationMode(captureRequestBuilder);
        chooseFocusMode(captureRequestBuilder);
        captureRequestBuilder.addTarget(surface);
        session.setRepeatingRequest(
                captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
    } catch (CameraAccessException e) {
        reportError("Failed to start capture request. " + e);
        return;
    }
    surfaceTextureHelper.startListening((VideoFrame frame) -> {
        checkIsOnCameraThread();
        if (state != SessionState.RUNNING) {
            Logging.d(TAG, "Texture frame captured but camera is no longer running.");
            return;
        }
        if (!firstFrameReported) {
            // Record camera start latency once, on the first delivered frame.
            firstFrameReported = true;
            final int startTimeMs =
                    (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
            camera2StartTimeMsHistogram.addSample(startTimeMs);
        }
        // Undo the mirror that the OS "helps" us with.
        // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
        // Also, undo camera orientation, we report it as rotation instead.
        final VideoFrame modifiedFrame =
                new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
                        (TextureBufferImpl) frame.getBuffer(),
                        /* mirror= */ isCameraFrontFacing,
                        /* rotation= */ -cameraOrientation),
                        /* rotation= */ getFrameOrientation(), frame.getTimestampNs());
        events.onFrameCaptured(Camera2Session.this, modifiedFrame);
        modifiedFrame.release();
    });
    Logging.d(TAG, "Camera device successfully started.");
    callback.onDone(Camera2Session.this);
}
Example #26
Source File: UiObjectElement.java From appium-uiautomator2-server with Apache License 2.0 | 4 votes |
/**
 * Returns the requested attribute of this element rendered as a string, or
 * null when the attribute has no value. Unknown attribute names raise the
 * no-attribute exception via {@code generateNoAttributeException}.
 *
 * @throws UiObjectNotFoundException if the underlying UiObject is gone
 */
@Nullable
@Override
public String getAttribute(String attr) throws UiObjectNotFoundException {
    final Attribute dstAttribute = Attribute.fromString(attr);
    if (dstAttribute == null) {
        throw generateNoAttributeException(attr);
    }
    final Object result;
    switch (dstAttribute) {
        case TEXT: result = getText(); break;
        case CONTENT_DESC: result = element.getContentDescription(); break;
        case CLASS: result = element.getClassName(); break;
        case RESOURCE_ID: result = getResourceId(); break;
        case CONTENT_SIZE: result = ElementHelpers.getContentSize(this); break;
        case ENABLED: result = element.isEnabled(); break;
        case CHECKABLE: result = element.isCheckable(); break;
        case CHECKED: result = element.isChecked(); break;
        case CLICKABLE: result = element.isClickable(); break;
        case FOCUSABLE: result = element.isFocusable(); break;
        case FOCUSED: result = element.isFocused(); break;
        case LONG_CLICKABLE: result = element.isLongClickable(); break;
        case SCROLLABLE: result = element.isScrollable(); break;
        case SELECTED: result = element.isSelected(); break;
        case DISPLAYED:
            // Visible only when the element both still exists and its node is visible.
            result = element.exists() && AccessibilityNodeInfoHelpers.isVisible(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            break;
        case PASSWORD:
            result = AccessibilityNodeInfoHelpers.isPassword(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            break;
        case BOUNDS: result = element.getVisibleBounds().toShortString(); break;
        case PACKAGE: {
            result = AccessibilityNodeInfoHelpers.getPackageName(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            break;
        }
        case SELECTION_END:
        case SELECTION_START:
            // Both selection attributes derive from the same (nullable) range.
            Range<Integer> selectionRange = AccessibilityNodeInfoHelpers.getSelectionRange(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            result = selectionRange == null
                    ? null
                    : (dstAttribute == Attribute.SELECTION_END
                            ? selectionRange.getUpper() : selectionRange.getLower());
            break;
        default:
            throw generateNoAttributeException(attr);
    }
    if (result == null) {
        return null;
    }
    // Non-string results (booleans, integers) are stringified for the caller.
    return (result instanceof String) ? (String) result : String.valueOf(result);
}
Example #27
Source File: UiObject2Element.java From appium-uiautomator2-server with Apache License 2.0 | 4 votes |
/**
 * Returns the requested attribute of this element rendered as a string, or
 * null when the attribute has no value. Unknown attribute names raise the
 * no-attribute exception via {@code generateNoAttributeException}.
 *
 * @throws UiObjectNotFoundException if the underlying UiObject2 is gone
 */
@Nullable
@Override
public String getAttribute(String attr) throws UiObjectNotFoundException {
    final Attribute dstAttribute = Attribute.fromString(attr);
    if (dstAttribute == null) {
        throw generateNoAttributeException(attr);
    }
    final Object result;
    switch (dstAttribute) {
        case TEXT: result = getText(); break;
        case CONTENT_DESC: result = element.getContentDescription(); break;
        case CLASS: result = element.getClassName(); break;
        case RESOURCE_ID: result = element.getResourceName(); break;
        case CONTENT_SIZE: result = ElementHelpers.getContentSize(this); break;
        case ENABLED: result = element.isEnabled(); break;
        case CHECKABLE: result = element.isCheckable(); break;
        case CHECKED: result = element.isChecked(); break;
        case CLICKABLE: result = element.isClickable(); break;
        case FOCUSABLE: result = element.isFocusable(); break;
        case FOCUSED: result = element.isFocused(); break;
        case LONG_CLICKABLE: result = element.isLongClickable(); break;
        case SCROLLABLE: result = element.isScrollable(); break;
        case SELECTED: result = element.isSelected(); break;
        case DISPLAYED:
            result = AccessibilityNodeInfoHelpers.isVisible(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            break;
        case PASSWORD:
            result = AccessibilityNodeInfoHelpers.isPassword(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            break;
        case BOUNDS: result = element.getVisibleBounds().toShortString(); break;
        case PACKAGE:
            result = AccessibilityNodeInfoHelpers.getPackageName(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            break;
        case SELECTION_END:
        case SELECTION_START:
            // Both selection attributes derive from the same (nullable) range.
            Range<Integer> selectionRange = AccessibilityNodeInfoHelpers.getSelectionRange(
                    AccessibilityNodeInfoGetter.fromUiObject(element));
            result = selectionRange == null
                    ? null
                    : (dstAttribute == Attribute.SELECTION_END
                            ? selectionRange.getUpper() : selectionRange.getLower());
            break;
        default:
            throw generateNoAttributeException(attr);
    }
    if (result == null) {
        return null;
    }
    // Non-string results (booleans, integers) are stringified for the caller.
    return (result instanceof String) ? (String) result : String.valueOf(result);
}
Example #28
Source File: CaptureHighSpeedVideoMode.java From Android-Slow-Motion-Camera2 with GNU General Public License v3.0 | 4 votes |
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) { // Range<Integer> fpsRange = Range.create(240, 240); Range<Integer> fpsRange = getHighestFpsRange(availableFpsRange); builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange); }
Example #29
Source File: DataManager.java From okuki with Apache License 2.0 | 4 votes |
/**
 * Observable that emits the index range of items appended to the results list
 * (fed by {@code rangeInserted} when new pages arrive).
 */
public Observable<Range<Integer>> onRangeInserted() {
    return rangeInserted;
}
Example #30
Source File: Camera2Enumerator.java From VideoCRE with MIT License | 4 votes |
/**
 * Returns the factor needed to bring the device's fps values into the x1000
 * domain: 1 when the ranges are already scaled by 1000, otherwise 1000.
 * An empty array defaults to 1000.
 */
static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
    if (fpsRanges.length == 0) {
        return 1000;
    }
    // Values >= 1000 indicate the HAL already reports fps multiplied by 1000.
    return fpsRanges[0].getUpper() >= 1000 ? 1 : 1000;
}