Java Code Examples for org.joda.time.Interval#getStart()
The following examples show how to use org.joda.time.Interval#getStart().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: IntervalUtils.java From incubator-pinot with Apache License 2.0 | 5 votes |
/** * This method is designed to merge a list of intervals to a list of intervals with no overlap in between * @param intervals * @return * a list of intervals with no overlap in between */ public static List<Interval> mergeIntervals (List<Interval> intervals) { if(intervals == null || intervals.size() == 0) { return intervals; } // Sort Intervals Collections.sort(intervals, new Comparator<Interval>() { @Override public int compare(Interval o1, Interval o2) { return o1.getStart().compareTo(o2.getStart()); } }); // Merge intervals Stack<Interval> intervalStack = new Stack<>(); intervalStack.push(intervals.get(0)); for(int i = 1; i < intervals.size(); i++) { Interval top = intervalStack.peek(); Interval target = intervals.get(i); if(top.overlap(target) == null && (top.getEnd() != target.getStart())) { intervalStack.push(target); } else if(top.equals(target)) { continue; } else { Interval newTop = new Interval(Math.min(top.getStart().getMillis(), target.getStart().getMillis()), Math.max(top.getEnd().getMillis(), target.getEnd().getMillis())); intervalStack.pop(); intervalStack.push(newTop); } } return intervalStack; }
Example 2
Source File: ManageEnrolementPeriodsDA.java From fenixedu-academic with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Builds a configuration for the given semester, copying the period bounds
 * from the interval when one is provided.
 *
 * @param interval the enrolment period bounds; when {@code null}, start and
 *                 end are left unset
 * @param semester the execution semester this configuration belongs to
 */
public AbstractEnrolmentPeriodConfiguration(Interval interval, ExecutionSemester semester) {
    this.semester = semester;
    if (interval == null) {
        return;
    }
    this.start = interval.getStart();
    this.end = interval.getEnd();
}
Example 3
Source File: Lesson.java From fenixedu-academic with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Checks whether any occurrence of this lesson overlaps the given interval.
 *
 * <p>Finished lessons and lessons without a resolvable start or end day never
 * overlap. A cheap date-range check rejects intervals entirely outside the
 * lesson's span before the per-date scan.
 *
 * @param interval the interval to test against
 * @return {@code true} if at least one lesson occurrence overlaps the interval
 */
public boolean overlaps(final Interval interval) {
    if (wasFinished()) {
        return false;
    }
    final YearMonthDay firstDay = getLessonStartDay();
    if (firstDay == null) {
        return false;
    }
    final YearMonthDay lastDay = getLessonEndDay();
    if (lastDay == null) {
        return false;
    }
    // Fast rejection: interval entirely after the lesson's last day
    // (midnight + 1 day bounds the last occurrence) or entirely before its first day.
    if (interval.getStart().isAfter(lastDay.toDateTimeAtMidnight().plusDays(1))) {
        return false;
    }
    if (interval.getEnd().isBefore(firstDay.toDateTimeAtMidnight())) {
        return false;
    }
    final HourMinuteSecond lessonBegin = getBeginHourMinuteSecond();
    final HourMinuteSecond lessonEnd = getEndHourMinuteSecond();
    // Scan every valid lesson date and test the concrete occurrence interval.
    for (final YearMonthDay lessonDay : getAllValidLessonDatesWithoutInstancesDates(firstDay, lastDay)) {
        final Interval occurrence =
                new Interval(toDateTime(lessonDay, lessonBegin), toDateTime(lessonDay, lessonEnd));
        if (occurrence.overlaps(interval)) {
            return true;
        }
    }
    return false;
}
Example 4
Source File: HoltWintersDetector.java From incubator-pinot with Apache License 2.0 | 4 votes |
/**
 * Runs Holt-Winters detection over the given window for the given metric URN.
 *
 * <p>Fetches training + window data, optionally applies kernel smoothing,
 * computes a prediction interval baseline, and flags points whose deviation
 * from the baseline exceeds the estimated error in the configured direction.
 *
 * @param window    detection window; only its start/end millis are used
 * @param metricUrn URN identifying the metric (and filters) to analyse
 * @return anomalies plus the baseline time series for the window
 */
@Override
public DetectionResult runDetection(Interval window, String metricUrn) {
  MetricEntity metricEntity = MetricEntity.fromURN(metricUrn);
  DateTime windowStart = window.getStart();
  // align start day to the user specified week start
  if (Objects.nonNull(this.weekStart)) {
    // Snap to the configured weekday at midnight, one week back, so weekly
    // seasonality is computed from a consistent week boundary.
    windowStart = window.getStart().withTimeAtStartOfDay().withDayOfWeek(weekStart.getValue()).minusWeeks(1);
  }
  DateTime trainStart = getTrainingStartTime(windowStart);
  // Resolve the dataset configuration for this metric id.
  DatasetConfigDTO datasetConfig = this.dataFetcher.fetchData(new InputDataSpec()
      .withMetricIdsForDataset(Collections.singleton(metricEntity.getId()))).getDatasetForMetricId()
      .get(metricEntity.getId());
  // Slice spans training start through window end so the model has history.
  MetricSlice sliceData = MetricSlice.from(metricEntity.getId(), trainStart.getMillis(),
      window.getEndMillis(), metricEntity.getFilters());
  DataFrame dfInput = fetchData(metricEntity, trainStart.getMillis(), window.getEndMillis(), datasetConfig);
  // Kernel smoothing
  // Skipped for daily granularity; kernel width is KERNEL_PERIOD in buckets.
  if (smoothing && !TimeUnit.DAYS.equals(datasetConfig.bucketTimeGranularity().getUnit())) {
    int kernelSize = (int) (KERNEL_PERIOD / datasetConfig.bucketTimeGranularity().toMillis());
    if (kernelSize > 1) {
      int kernelOffset = kernelSize / 2;
      double[] values = dfInput.getDoubles(COL_VALUE).values();
      // Replace each centre point with the robust mean of its kernel window.
      for (int i = 0; i <= values.length - kernelSize; i++) {
        values[i + kernelOffset] = AlgorithmUtils.robustMean(dfInput.getDoubles(COL_VALUE)
            .slice(i, i + kernelSize), kernelSize).getDouble(kernelSize - 1);
      }
      dfInput.addSeries(COL_VALUE, values);
    }
  }
  DataFrame dfCurr = new DataFrame(dfInput).renameSeries(COL_VALUE, COL_CURR);
  // Baseline prediction (expected value + error bound) from the window start.
  DataFrame dfBase = computePredictionInterval(dfInput, windowStart.getMillis(), datasetConfig.getTimezone());
  DataFrame df = new DataFrame(dfCurr).addSeries(dfBase, COL_VALUE, COL_ERROR);
  // Deviation of the observed value from the predicted baseline.
  df.addSeries(COL_DIFF, df.getDoubles(COL_CURR).subtract(df.get(COL_VALUE)));
  df.addSeries(COL_ANOMALY, BooleanSeries.fillValues(df.size(), false));
  // Filter pattern
  // UP_OR_DOWN accepts any direction; otherwise keep only the configured sign.
  if (pattern.equals(Pattern.UP_OR_DOWN) ) {
    df.addSeries(COL_PATTERN, BooleanSeries.fillValues(df.size(), true));
  } else {
    df.addSeries(COL_PATTERN, pattern.equals(Pattern.UP) ? df.getDoubles(COL_DIFF).gt(0)
        : df.getDoubles(COL_DIFF).lt(0));
  }
  // Violation when |deviation| reaches the predicted error bound.
  df.addSeries(COL_DIFF_VIOLATION, df.getDoubles(COL_DIFF).abs().gte(df.getDoubles(COL_ERROR)));
  // Anomaly = matches direction AND exceeds error bound.
  df.mapInPlace(BooleanSeries.ALL_TRUE, COL_ANOMALY, COL_PATTERN, COL_DIFF_VIOLATION);
  // Anomalies
  List<MergedAnomalyResultDTO> anomalyResults = DetectionUtils.makeAnomalies(sliceData, df, COL_ANOMALY,
      window.getEndMillis(), DetectionUtils.getMonitoringGranularityPeriod(this.monitoringGranularity,
      datasetConfig), datasetConfig);
  // Attach observed values to the baseline rows for the returned time series.
  dfBase = dfBase.joinRight(df.retainSeries(COL_TIME, COL_CURR), COL_TIME);
  return DetectionResult.from(anomalyResults, TimeSeries.fromDataFrame(dfBase));
}
Example 5
Source File: PercentageChangeRuleDetector.java From incubator-pinot with Apache License 2.0 | 4 votes |
/**
 * Runs percentage-change detection over the given window for the given metric URN.
 *
 * <p>Compares the current series against a baseline series and flags points
 * whose relative change exceeds the configured threshold in the configured
 * direction.
 *
 * @param window    detection window; only its start/end millis are used
 * @param metricUrn URN identifying the metric (and filters) to analyse
 * @return anomalies plus the baseline (with change boundaries) for the window
 */
@Override
public DetectionResult runDetection(Interval window, String metricUrn) {
  DateTime windowStart = window.getStart();
  // align start day to the user specified week start
  if (Objects.nonNull(this.weekStart)) {
    // Snap to the configured weekday at midnight, one week back — presumably so
    // weekly aggregation buckets line up; confirm against callers.
    windowStart = window.getStart().withTimeAtStartOfDay().withDayOfWeek(weekStart.getValue()).minusWeeks(1);
  }
  MetricEntity me = MetricEntity.fromURN(metricUrn);
  MetricSlice slice = MetricSlice.from(me.getId(), windowStart.getMillis(), window.getEndMillis(),
      me.getFilters(), timeGranularity);
  // Fetch the baseline slices plus the current slice in one request.
  List<MetricSlice> slices = new ArrayList<>(this.baseline.scatter(slice));
  slices.add(slice);
  InputData data = this.dataFetcher.fetchData(new InputDataSpec().withTimeseriesSlices(slices)
      .withMetricIdsForDataset(Collections.singletonList(slice.getMetricId()))
      .withMetricIds(Collections.singletonList(me.getId())));
  DataFrame dfBase = this.baseline.gather(slice, data.getTimeseries());
  DataFrame dfCurr = data.getTimeseries().get(slice);
  DatasetConfigDTO datasetConfig = data.getDatasetForMetricId().get(me.getId());
  MetricConfigDTO metricConfig = data.getMetrics().get(me.getId());
  // aggregate data to specified weekly granularity
  if (this.monitoringGranularity.endsWith(TimeGranularity.WEEKS)) {
    Period monitoringGranularityPeriod =
        DetectionUtils.getMonitoringGranularityPeriod(this.monitoringGranularity, datasetConfig);
    long latestDataTimeStamp = dfCurr.getLong(COL_TIME, dfCurr.size() - 1);
    dfCurr = DetectionUtils.aggregateByPeriod(dfCurr, windowStart, monitoringGranularityPeriod,
        metricConfig.getDefaultAggFunction());
    // Drop a trailing partial bucket so an incomplete week is not compared.
    dfCurr = DetectionUtils.filterIncompleteAggregation(dfCurr, latestDataTimeStamp,
        datasetConfig.bucketTimeGranularity(), monitoringGranularityPeriod);
    dfBase = DetectionUtils.aggregateByPeriod(dfBase, windowStart, monitoringGranularityPeriod,
        metricConfig.getDefaultAggFunction());
  }
  dfCurr = dfCurr.renameSeries(COL_VALUE, COL_CURR);
  DataFrame df = new DataFrame(dfCurr).addSeries(dfBase);
  // calculate percentage change
  // values[0] = current, values[1] = baseline. A zero baseline maps to 0 when
  // the current value is also zero, otherwise to +/- infinity by sign.
  df.addSeries(COL_CHANGE, map((Series.DoubleFunction) values -> {
    if (Double.compare(values[1], 0.0) == 0) {
      return Double.compare(values[0], 0.0) == 0 ? 0.0
          : (values[0] > 0 ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY);
    }
    return (values[0] - values[1]) / values[1];
  }, df.getDoubles(COL_CURR), df.get(COL_VALUE)));
  // defaults
  df.addSeries(COL_ANOMALY, BooleanSeries.fillValues(df.size(), false));
  // relative change
  // NaN threshold disables the relative-change rule entirely.
  if (!Double.isNaN(this.percentageChange)) {
    // consistent with pattern
    if (pattern.equals(Pattern.UP_OR_DOWN)) {
      df.addSeries(COL_PATTERN, BooleanSeries.fillValues(df.size(), true));
    } else {
      df.addSeries(COL_PATTERN, this.pattern.equals(Pattern.UP) ? df.getDoubles(COL_CHANGE).gt(0)
          : df.getDoubles(COL_CHANGE).lt(0));
    }
    df.addSeries(COL_CHANGE_VIOLATION, df.getDoubles(COL_CHANGE).abs().gte(this.percentageChange));
    // Anomaly = matches direction AND exceeds the change threshold.
    df.mapInPlace(BooleanSeries.ALL_TRUE, COL_ANOMALY, COL_PATTERN, COL_CHANGE_VIOLATION);
  }
  List<MergedAnomalyResultDTO> anomalies = DetectionUtils.makeAnomalies(slice, df, COL_ANOMALY,
      window.getEndMillis(),
      DetectionUtils.getMonitoringGranularityPeriod(monitoringGranularity, datasetConfig), datasetConfig);
  DataFrame baselineWithBoundaries = constructPercentageChangeBoundaries(df);
  return DetectionResult.from(anomalies, TimeSeries.fromDataFrame(baselineWithBoundaries));
}
Example 6
Source File: PersistentInterval.java From jadira with Apache License 2.0 | 4 votes |
/**
 * Maps an {@code Interval} onto its two persisted column values:
 * the start instant followed by the end instant.
 *
 * @param value the interval to decompose
 * @return a two-element array holding the interval's start and end
 */
@Override
protected Object[] toConvertedColumns(Interval value) {
    final Object[] columns = new Object[2];
    columns[0] = value.getStart();
    columns[1] = value.getEnd();
    return columns;
}
Example 7
Source File: TimePeriod.java From arctic-sea with Apache License 2.0 | 2 votes |
/**
 * Creates a new {@code TimePeriod} from an {@code Interval}.
 *
 * <p>Delegates to the two-argument constructor using the interval's start and
 * end instants. A {@code null} interval will raise a {@code NullPointerException}.
 *
 * @param interval the interval whose start and end delimit the period
 */
public TimePeriod(Interval interval) {
    this(interval.getStart(), interval.getEnd());
}