io.searchbox.core.search.aggregation.DateHistogramAggregation Java Examples
The following examples show how to use
io.searchbox.core.search.aggregation.DateHistogramAggregation.
You can vote up the examples you like or vote down the ones you don't like, and follow the links above each example to go to the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: ElasticSearchReportService.java From vind with Apache License 2.0 | 6 votes |
/**
 * Returns the days in the report window ordered by hit count, highest first.
 * Keys are the bucket start times rendered in this report's zone; values are
 * the bucket document counts.
 */
@Override
public LinkedHashMap<ZonedDateTime, Long> getTopDays() {
    // Resolve the "topDays" query template against the current configuration
    // and the report's [from, to] window (epoch millis).
    final String query = elasticClient.loadQueryFromFile("topDays",
            getEsFilters(),
            this.configuration.getMessageWrapper(),
            this.configuration.getMessageWrapper(),
            this.configuration.getApplicationId(),
            this.configuration.getMessageWrapper(),
            this.configuration.getMessageWrapper(),
            this.configuration.getMessageWrapper(),
            this.getFrom().toInstant().toEpochMilli(),
            this.getTo().toInstant().toEpochMilli(),
            this.configuration.getMessageWrapper());

    final SearchResult searchResult = elasticClient.getQuery(query);

    // Order buckets by document count, descending.
    final Comparator<DateHistogramAggregation.DateHistogram> byCountDesc =
            Comparator.comparingLong(DateHistogramAggregation.DateHistogram::getCount).reversed();

    // LinkedHashMap keeps the descending-count iteration order for callers.
    final LinkedHashMap<ZonedDateTime, Long> topDays = new LinkedHashMap<>();
    searchResult.getAggregations()
            .getDateHistogramAggregation("days")
            .getBuckets()
            .stream()
            .sorted(byCountDesc)
            .forEachOrdered(bucket -> topDays.put(
                    ZonedDateTime.ofInstant(Instant.ofEpochMilli(bucket.getTime()), this.getZoneId()),
                    bucket.getCount()));
    return topDays;
}
Example #2
Source File: ElasticsearchService.java From c2mon with GNU Lesser General Public License v3.0 | 4 votes |
private List<Object[]> getAggregatedHistory(Long id, Long min, Long max, String aggregate) { // Figure out the right interval String interval = aggregate.equals("auto") ? getInterval(min, max) : aggregate; log.info("Using interval: " + interval); String query = String.format("{\n" + " \"size\" : " + maxResults + ",\n" + " \"query\" : {\n" + " \"term\" : {\n" + " \"id\" : %d\n" + " }\n" + " },\n" + " \"aggregations\" : {\n" + " \"time-range\" : {\n" + " \"filter\" : {\n" + " \"range\" : {\n" + " \"timestamp\" : {\n" + " \"from\" : %d,\n" + " \"to\" : %d,\n" + " \"include_lower\" : true,\n" + " \"include_upper\" : true\n" + " }\n" + " }\n" + " },\n" + " \"aggregations\" : {\n" + " \"events-per-interval\" : {\n" + " \"date_histogram\" : {\n" + " \"field\" : \"timestamp\",\n" + " \"interval\" : \"%s\"\n" + " },\n" + " \"aggregations\" : {\n" + " \"avg-value\" : {\n" + " \"avg\" : {\n" + " \"field\" : \"value\"\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}", id, min, max, aggregate); Search search = new Search.Builder(query).addIndex(timeSeriesIndex).build(); long start = System.currentTimeMillis(); try { List<Object[]> results = new ArrayList<>(); SearchResult result = client.execute(search); DateHistogramAggregation aggregation = result.getAggregations().getFilterAggregation("time-range").getDateHistogramAggregation("events-per-interval"); for (DateHistogram bucket : aggregation.getBuckets()) { AvgAggregation avg = bucket.getAvgAggregation("avg-value"); results.add(new Object[]{Long.parseLong(bucket.getTimeAsString()), avg.getAvg()}); } log.info("Loaded {} values in {}ms", results.size(), System.currentTimeMillis() - start); return results; } catch (IOException e) { throw new RuntimeException("Error querying history for tag #" + id, e); } }
Example #3
Source File: ESMetricsAccessor.java From apiman with Apache License 2.0 | 4 votes |
/**
 * Builds a usage histogram (request counts per time bucket) for one API version.
 *
 * @see io.apiman.manager.api.core.IMetricsAccessor#getUsage(java.lang.String, java.lang.String, java.lang.String, io.apiman.manager.api.beans.metrics.HistogramIntervalType, org.joda.time.DateTime, org.joda.time.DateTime)
 */
@SuppressWarnings("nls")
@Override
public UsageHistogramBean getUsage(String organizationId, String apiId, String version,
        HistogramIntervalType interval, DateTime from, DateTime to) {
    UsageHistogramBean rval = new UsageHistogramBean();
    // Pre-populate every bucket in [from, to] with a zero data point, indexed
    // by its key string, so buckets missing from the ES response stay at zero.
    Map<String, UsageDataPoint> index = generateHistogramSkeleton(rval, from, to, interval, UsageDataPoint.class);
    try {
        // Filter on org/API/version and the request time window; size 0 because
        // only the "histogram" date_histogram aggregation is consumed.
        String query = "{" +
                "  \"query\": {" +
                "    \"bool\": {" +
                "      \"filter\": [{" +
                "        \"term\": {" +
                "          \"apiOrgId\": \"${apiOrgId}\"" +
                "        }" +
                "      }, {" +
                "        \"term\": {" +
                "          \"apiId\": \"${apiId}\"" +
                "        }" +
                "      }, {" +
                "        \"term\": {" +
                "          \"apiVersion\": \"${apiVersion}\"" +
                "        }" +
                "      }," +
                "      {" +
                "        \"range\": {" +
                "          \"requestStart\": {" +
                "            \"gte\": \"${from}\"," +
                "            \"lte\": \"${to}\"" +
                "          }" +
                "        }" +
                "      }" +
                "      ]" +
                "    }" +
                "  }," +
                "  \"size\": 0," +
                "  \"aggs\": {" +
                "    \"histogram\": {" +
                "      \"date_histogram\": {" +
                "        \"field\": \"requestStart\"," +
                "        \"interval\": \"${interval}\"" +
                "      }" +
                "    }" +
                "  }" +
                "}";
        Map<String, String> params = new HashMap<>();
        params.put("from", formatDate(from));
        params.put("to", formatDate(to));
        // Double quotes are replaced to keep user-supplied ids from breaking
        // (or injecting into) the hand-built JSON query.
        params.put("apiOrgId", organizationId.replace('"', '_'));
        params.put("apiId", apiId.replace('"', '_'));
        params.put("apiVersion", version.replace('"', '_'));
        // NOTE(review): the enum constant name is used directly as the ES
        // interval value — presumably the constants are named "minute"/"hour"/
        // "day"/etc.; confirm against HistogramIntervalType.
        params.put("interval", interval.name());
        StrSubstitutor ss = new StrSubstitutor(params);
        query = ss.replace(query);

        Search search = new Search.Builder(query).addIndex(INDEX_NAME).addType("request").build();
        SearchResult response = getEsClient().execute(search);
        MetricAggregation aggregations = response.getAggregations();
        DateHistogramAggregation aggregation = aggregations.getDateHistogramAggregation("histogram");
        if (aggregation != null) {
            List<DateHistogram> buckets = aggregation.getBuckets();
            // Copy each returned bucket's count into the matching skeleton
            // entry; buckets outside the skeleton are ignored.
            for (DateHistogram entry : buckets) {
                String keyAsString = entry.getTimeAsString();
                if (index.containsKey(keyAsString)) {
                    index.get(keyAsString).setCount(entry.getCount());
                }
            }
        }
    } catch (IOException e) {
        // Best effort: on ES failure, log and return the zero-filled skeleton.
        log.error(e);
    }
    return rval;
}
Example #4
Source File: ESMetricsAccessor.java From apiman with Apache License 2.0 | 4 votes |
/**
 * Builds a response-statistics histogram (total/failure/error counts per time
 * bucket) for one API version.
 *
 * @see io.apiman.manager.api.core.IMetricsAccessor#getResponseStats(java.lang.String, java.lang.String, java.lang.String, io.apiman.manager.api.beans.metrics.HistogramIntervalType, org.joda.time.DateTime, org.joda.time.DateTime)
 */
@SuppressWarnings("nls")
@Override
public ResponseStatsHistogramBean getResponseStats(String organizationId, String apiId, String version,
        HistogramIntervalType interval, DateTime from, DateTime to) {
    ResponseStatsHistogramBean rval = new ResponseStatsHistogramBean();
    // Pre-populate every bucket in [from, to] with a zero data point, indexed
    // by its key string, so buckets missing from the ES response stay at zero.
    Map<String, ResponseStatsDataPoint> index = generateHistogramSkeleton(rval, from, to, interval, ResponseStatsDataPoint.class);
    try {
        // Filter on org/API/version and the request time window; each
        // date_histogram bucket carries two filter sub-aggregations counting
        // failed and errored requests.
        String query = "{" +
                "  \"query\": {" +
                "    \"bool\": {" +
                "      \"filter\": [{" +
                "        \"term\": {" +
                "          \"apiOrgId\": \"${apiOrgId}\"" +
                "        }" +
                "      }, {" +
                "        \"term\": {" +
                "          \"apiId\": \"${apiId}\"" +
                "        }" +
                "      }, {" +
                "        \"term\": {" +
                "          \"apiVersion\": \"${apiVersion}\"" +
                "        }" +
                "      }, {" +
                "        \"range\": {" +
                "          \"requestStart\": {" +
                "            \"gte\": \"${from}\"," +
                "            \"lte\": \"${to}\"" +
                "          }" +
                "        }" +
                "      }]" +
                "    }" +
                "  }," +
                "  \"size\": 0," +
                "  \"aggs\": {" +
                "    \"histogram\": {" +
                "      \"date_histogram\": {" +
                "        \"field\": \"requestStart\"," +
                "        \"interval\": \"${interval}\"" +
                "      }," +
                "      \"aggs\": {" +
                "        \"total_failures\": {" +
                "          \"filter\": {" +
                "            \"term\": {" +
                "              \"failure\": true" +
                "            }" +
                "          }" +
                "        }," +
                "        \"total_errors\": {" +
                "          \"filter\": {" +
                "            \"term\": {" +
                "              \"error\": true" +
                "            }" +
                "          }" +
                "        }" +
                "      }" +
                "    }" +
                "  }" +
                "}";
        Map<String, String> params = new HashMap<>();
        params.put("from", formatDate(from));
        params.put("to", formatDate(to));
        // Double quotes are replaced to keep user-supplied ids from breaking
        // (or injecting into) the hand-built JSON query.
        params.put("apiOrgId", organizationId.replace('"', '_'));
        params.put("apiId", apiId.replace('"', '_'));
        params.put("apiVersion", version.replace('"', '_'));
        // NOTE(review): the enum constant name is used directly as the ES
        // interval value — confirm HistogramIntervalType constants match the
        // intervals Elasticsearch accepts.
        params.put("interval", interval.name());
        StrSubstitutor ss = new StrSubstitutor(params);
        query = ss.replace(query);

        Search search = new Search.Builder(query).addIndex(INDEX_NAME).addType("request").build();
        SearchResult response = getEsClient().execute(search);
        MetricAggregation aggregations = response.getAggregations();
        DateHistogramAggregation aggregation = aggregations.getDateHistogramAggregation("histogram");
        if (aggregation != null) {
            List<DateHistogram> buckets = aggregation.getBuckets();
            // Copy each returned bucket's total plus its failure/error
            // sub-aggregation counts into the matching skeleton entry;
            // buckets outside the skeleton are ignored.
            for (DateHistogram entry : buckets) {
                String keyAsString = entry.getTimeAsString();
                if (index.containsKey(keyAsString)) {
                    FilterAggregation totalFailuresAgg = entry.getFilterAggregation("total_failures");
                    FilterAggregation totalErrorsAgg = entry.getFilterAggregation("total_errors");
                    long failures = totalFailuresAgg.getCount();
                    long errors = totalErrorsAgg.getCount();
                    ResponseStatsDataPoint point = index.get(keyAsString);
                    point.setTotal(entry.getCount());
                    point.setFailures(failures);
                    point.setErrors(errors);
                }
            }
        }
    } catch (IOException e) {
        // Best effort: on ES failure, log and return the zero-filled skeleton.
        log.error(e);
    }
    return rval;
}