org.elasticsearch.common.util.LongObjectPagedHashMap Java Examples
The following examples show how to use
org.elasticsearch.common.util.LongObjectPagedHashMap.
Each example notes its original project, source file, and license.
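Based purely on the calls visible in the examples below (a constructor taking an initial capacity and a BigArrays, put, get, size, cursor iteration, and close), a minimal standalone sketch of the API might look like the following. The key/value pairs are illustrative, and BigArrays.NON_RECYCLING_INSTANCE is the non-pooling BigArrays instance, convenient outside a real search context.

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongObjectPagedHashMap;

public class LongObjectPagedHashMapExample {
    public static void main(String[] args) {
        // The map is backed by BigArrays pages, so it is created from a
        // BigArrays instance and must be closed to release those pages.
        LongObjectPagedHashMap<String> map =
                new LongObjectPagedHashMap<>(16, BigArrays.NON_RECYCLING_INSTANCE);
        try {
            map.put(1L, "one");
            map.put(2L, "two");

            String value = map.get(1L); // null when the key is absent

            // Iteration goes through cursors that expose the key and value.
            for (LongObjectPagedHashMap.Cursor<String> cursor : map) {
                System.out.println(cursor.key + " -> " + cursor.value);
            }
            System.out.println("size=" + map.size() + ", value=" + value);
        } finally {
            map.close();
        }
    }
}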
Example #1
Source File: TopHitsAggregator.java, from Elasticsearch (Apache License 2.0)
public TopHitsAggregator(FetchPhase fetchPhase, SubSearchContext subSearchContext, String name,
        AggregationContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
        Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.fetchPhase = fetchPhase;
    topDocsCollectors = new LongObjectPagedHashMap<>(1, context.bigArrays());
    this.subSearchContext = subSearchContext;
}
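Here the map associates each bucket ordinal with its own top-docs collector. The initial capacity of 1 suggests the common case: a top_hits aggregation typically collects under a single parent bucket, and the map only grows when it is nested inside a multi-bucket aggregation.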
Example #2
Source File: ParentToChildrenAggregator.java, from Elasticsearch (Apache License 2.0)
public ParentToChildrenAggregator(String name, AggregatorFactories factories,
        AggregationContext aggregationContext, Aggregator parent, String parentType,
        Query childFilter, Query parentFilter,
        ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource, long maxOrd,
        List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
    super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
    this.parentType = parentType;
    // these two filters are cached in the parser
    this.childFilter = aggregationContext.searchContext().searcher().createNormalizedWeight(childFilter, false);
    this.parentFilter = aggregationContext.searchContext().searcher().createNormalizedWeight(parentFilter, false);
    this.parentOrdToBuckets = aggregationContext.bigArrays().newLongArray(maxOrd, false);
    this.parentOrdToBuckets.fill(0, maxOrd, -1); // -1 marks parent ordinals with no bucket yet
    this.parentOrdToOtherBuckets = new LongObjectPagedHashMap<>(aggregationContext.bigArrays());
    this.valuesSource = valuesSource;
}
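Judging by the field names, the mapping from parent ordinal to bucket is split in two: the dense LongArray parentOrdToBuckets (pre-filled with -1 to mark parents with no bucket yet) covers the common one-bucket-per-parent case, while the LongObjectPagedHashMap parentOrdToOtherBuckets serves as a sparse overflow table for the rare parent ordinals that land in several buckets.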
Example #3
Source File: InternalGeoHashGrid.java, from Elasticsearch (Apache License 2.0)
@Override
public InternalGeoHashGrid doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
    // Group buckets from all shard-level aggregations by their geohash key.
    LongObjectPagedHashMap<List<Bucket>> buckets = null;
    for (InternalAggregation aggregation : aggregations) {
        InternalGeoHashGrid grid = (InternalGeoHashGrid) aggregation;
        if (buckets == null) {
            buckets = new LongObjectPagedHashMap<>(grid.buckets.size(), reduceContext.bigArrays());
        }
        for (Bucket bucket : grid.buckets) {
            List<Bucket> existingBuckets = buckets.get(bucket.geohashAsLong);
            if (existingBuckets == null) {
                existingBuckets = new ArrayList<>(aggregations.size());
                buckets.put(bucket.geohashAsLong, existingBuckets);
            }
            existingBuckets.add(bucket);
        }
    }
    // Merge each same-cell group and keep only the top `requiredSize` cells.
    final int size = (int) Math.min(requiredSize, buckets.size());
    BucketPriorityQueue ordered = new BucketPriorityQueue(size);
    for (LongObjectPagedHashMap.Cursor<List<Bucket>> cursor : buckets) {
        List<Bucket> sameCellBuckets = cursor.value;
        ordered.insertWithOverflow(sameCellBuckets.get(0).reduce(sameCellBuckets, reduceContext));
    }
    buckets.close(); // release the pages backing the map
    Bucket[] list = new Bucket[ordered.size()];
    for (int i = ordered.size() - 1; i >= 0; i--) {
        list[i] = ordered.pop();
    }
    return new InternalGeoHashGrid(getName(), requiredSize, Arrays.asList(list), pipelineAggregators(), getMetaData());
}
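This is the usual shard-reduce idiom: buckets from every shard response are grouped under their long geohash key, each same-cell group is merged with a single reduce call, and a size-bounded priority queue keeps only the top requiredSize cells. Closing the map once the grouping is done returns its pages to the BigArrays pool.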
Example #4
Source File: InternalGeoShape.java, from elasticsearch-plugin-geoshape (MIT License)
/**
 * Reduces the given aggregations to a single one and returns it.
 */
@Override
public InternalGeoShape reduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
    LongObjectPagedHashMap<List<InternalBucket>> buckets = null;
    for (InternalAggregation aggregation : aggregations) {
        InternalGeoShape shape = (InternalGeoShape) aggregation;
        if (buckets == null) {
            buckets = new LongObjectPagedHashMap<>(shape.buckets.size(), reduceContext.bigArrays());
        }
        for (InternalBucket bucket : shape.buckets) {
            List<InternalBucket> existingBuckets = buckets.get(bucket.getShapeHash());
            if (existingBuckets == null) {
                existingBuckets = new ArrayList<>(aggregations.size());
                buckets.put(bucket.getShapeHash(), existingBuckets);
            }
            existingBuckets.add(bucket);
        }
    }
    final int size = !reduceContext.isFinalReduce()
            ? (int) buckets.size()
            : Math.min(requiredSize, (int) buckets.size());
    BucketPriorityQueue ordered = new BucketPriorityQueue(size);
    for (LongObjectPagedHashMap.Cursor<List<InternalBucket>> cursor : buckets) {
        List<InternalBucket> sameCellBuckets = cursor.value;
        ordered.insertWithOverflow(reduceBucket(sameCellBuckets, reduceContext));
    }
    buckets.close();
    InternalBucket[] list = new InternalBucket[ordered.size()];
    for (int i = ordered.size() - 1; i >= 0; i--) {
        list[i] = ordered.pop();
    }
    return new InternalGeoShape(getName(), Arrays.asList(list), output_format, requiredSize, shardSize,
            pipelineAggregators(), getMetaData());
}
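The notable difference from the previous example is the isFinalReduce check: on intermediate reduces the queue is sized to hold every bucket, so no shape bucket is discarded before the final reduce applies requiredSize.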