org.apache.flink.util.MutableObjectIterator Java Examples
The following examples show how to use
org.apache.flink.util.MutableObjectIterator.
Each example notes the original project and source file it was taken from.
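Before working through the examples, it may help to recall the contract of the interface: next(reuse) may fill and return the object passed in (or a new one) and returns null once the input is exhausted, which is why the code below consistently uses the while ((record = iter.next(record)) != null) loop. The following is a minimal, self-contained sketch of that pattern; the ListBackedIterator class and its sample data are hypothetical and exist only for illustration, they are not part of Flink.

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.MutableObjectIterator;

// Hypothetical iterator that serves records from a List and reuses the caller-supplied object.
public class ListBackedIterator implements MutableObjectIterator<Tuple2<Integer, String>> {

    private final List<Tuple2<Integer, String>> data;
    private int pos;

    public ListBackedIterator(List<Tuple2<Integer, String>> data) {
        this.data = data;
    }

    @Override
    public Tuple2<Integer, String> next(Tuple2<Integer, String> reuse) throws IOException {
        if (pos >= data.size()) {
            return null; // exhausted, like every iterator in the examples below
        }
        Tuple2<Integer, String> src = data.get(pos++);
        reuse.f0 = src.f0;
        reuse.f1 = src.f1;
        return reuse;
    }

    @Override
    public Tuple2<Integer, String> next() throws IOException {
        // non-reusing variant: hand out a fresh copy instead of mutating a reuse object
        return pos < data.size() ? data.get(pos++).copy() : null;
    }

    public static void main(String[] args) throws IOException {
        MutableObjectIterator<Tuple2<Integer, String>> iter =
                new ListBackedIterator(Arrays.asList(Tuple2.of(1, "a"), Tuple2.of(2, "b")));

        // The canonical consumption pattern used throughout the examples on this page.
        Tuple2<Integer, String> record = new Tuple2<>();
        while ((record = iter.next(record)) != null) {
            System.out.println(record);
        }
    }
}

The same reuse-based loop appears in almost every example below, whether the records come from a sorter, a hash table, or a spilling buffer.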
Example #1
Source File: CoGroupRawDriver.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public void prepare() throws Exception {
    final TaskConfig config = this.taskContext.getTaskConfig();
    if (config.getDriverStrategy() != DriverStrategy.CO_GROUP_RAW) {
        throw new Exception("Unrecognized driver strategy for CoGroup Python driver: " + config.getDriverStrategy().name());
    }

    final MutableObjectIterator<IT1> in1 = this.taskContext.getInput(0);
    final MutableObjectIterator<IT2> in2 = this.taskContext.getInput(1);

    IT1 reuse1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer().createInstance();
    IT2 reuse2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer().createInstance();

    this.coGroupIterator1 = new SimpleIterable<IT1>(reuse1, in1);
    this.coGroupIterator2 = new SimpleIterable<IT2>(reuse2, in2);

    if (LOG.isDebugEnabled()) {
        LOG.debug(this.taskContext.formatLogString("CoGroup task iterator ready."));
    }
}
Example #2
Source File: ReusingMergeOuterJoinIterator.java From flink with Apache License 2.0 | 6 votes |
public ReusingMergeOuterJoinIterator(
        OuterJoinType outerJoinType,
        MutableObjectIterator<T1> input1,
        MutableObjectIterator<T2> input2,
        TypeSerializer<T1> serializer1, TypeComparator<T1> comparator1,
        TypeSerializer<T2> serializer2, TypeComparator<T2> comparator2,
        TypePairComparator<T1, T2> pairComparator,
        MemoryManager memoryManager,
        IOManager ioManager,
        int numMemoryPages,
        AbstractInvokable parentTask)
        throws MemoryAllocationException {

    super(outerJoinType, input1, input2, serializer1, comparator1, serializer2, comparator2,
            pairComparator, memoryManager, ioManager, numMemoryPages, parentTask);

    this.copy1 = serializer1.createInstance();
    this.spillHeadCopy = serializer1.createInstance();
    this.copy2 = serializer2.createInstance();
    this.blockHeadCopy = serializer2.createInstance();
}
Example #3
Source File: BatchTask.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Creates all the serializers and comparators.
 */
protected void initInputsSerializersAndComparators(int numInputs, int numComparators) throws Exception {
    this.inputSerializers = new TypeSerializerFactory<?>[numInputs];
    this.inputComparators = numComparators > 0 ? new TypeComparator<?>[numComparators] : null;
    this.inputIterators = new MutableObjectIterator<?>[numInputs];

    ClassLoader userCodeClassLoader = getUserCodeClassLoader();

    for (int i = 0; i < numInputs; i++) {
        final TypeSerializerFactory<?> serializerFactory = this.config.getInputSerializer(i, userCodeClassLoader);
        this.inputSerializers[i] = serializerFactory;

        this.inputIterators[i] = createInputIterator(this.inputReaders[i], this.inputSerializers[i]);
    }

    // ---------------- create the driver's comparators ---------------------
    for (int i = 0; i < numComparators; i++) {
        if (this.inputComparators != null) {
            final TypeComparatorFactory<?> comparatorFactory = this.config.getDriverComparator(i, userCodeClassLoader);
            this.inputComparators[i] = comparatorFactory.createComparator();
        }
    }
}
Example #4
Source File: AllGroupReduceDriverTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testAllReduceDriverImmutableEmpty() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get();
        context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);

        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(new DiscardingOutputCollector<Tuple2<String, Integer>>());

        AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver =
                new AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #5
Source File: UnilateralSortMerger.java From flink with Apache License 2.0 | 6 votes |
@Override
public MutableObjectIterator<E> getIterator() throws InterruptedException {
    synchronized (this.iteratorLock) {
        // wait while both the iterator and the exception are not set
        while (this.iterator == null && this.iteratorException == null) {
            this.iteratorLock.wait();
        }

        if (this.iteratorException != null) {
            throw new RuntimeException("Error obtaining the sorted input: " + this.iteratorException.getMessage(),
                    this.iteratorException);
        }
        else {
            return this.iterator;
        }
    }
}
Example #6
Source File: LongHashTableTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testSparseProbeSpilling() throws IOException, MemoryAllocationException {
    final int numBuildKeys = 1000000;
    final int numBuildVals = 1;
    final int numProbeKeys = 20;
    final int numProbeVals = 1;

    MutableObjectIterator<BinaryRow> buildInput = new UniformBinaryRowGenerator(
            numBuildKeys, numBuildVals, false);
    final MyHashTable table = new MyHashTable(100 * PAGE_SIZE);

    int expectedNumResults = (Math.min(numProbeKeys, numBuildKeys) * numBuildVals) * numProbeVals;

    int numRecordsInJoinResult = join(table, buildInput, new UniformBinaryRowGenerator(numProbeKeys, numProbeVals, true));
    Assert.assertEquals("Wrong number of records in join result.", expectedNumResults, numRecordsInJoinResult);

    table.close();
    table.free();
}
Example #7
Source File: AllGroupReduceDriverTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Test
public void testAllReduceDriverImmutableEmpty() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get();
        context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);

        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(new DiscardingOutputCollector<Tuple2<String, Integer>>());

        AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver =
                new AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #8
Source File: ReusingMergeInnerJoinIterator.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public ReusingMergeInnerJoinIterator(
        MutableObjectIterator<T1> input1,
        MutableObjectIterator<T2> input2,
        TypeSerializer<T1> serializer1, TypeComparator<T1> comparator1,
        TypeSerializer<T2> serializer2, TypeComparator<T2> comparator2,
        TypePairComparator<T1, T2> pairComparator,
        MemoryManager memoryManager,
        IOManager ioManager,
        int numMemoryPages,
        AbstractInvokable parentTask)
        throws MemoryAllocationException {

    super(input1, input2, serializer1, comparator1, serializer2, comparator2,
            pairComparator, memoryManager, ioManager, numMemoryPages, parentTask);

    this.copy1 = serializer1.createInstance();
    this.spillHeadCopy = serializer1.createInstance();
    this.copy2 = serializer2.createInstance();
    this.blockHeadCopy = serializer2.createInstance();
}
Example #9
Source File: BinaryOperatorTestBase.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public <X> MutableObjectIterator<X> getInput(int index) {
    MutableObjectIterator<IN> in = this.inputs.get(index);
    if (in == null) {
        // waiting from sorter
        try {
            in = this.sorters.get(index).getIterator();
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted");
        }
        this.inputs.set(index, in);
    }

    @SuppressWarnings("unchecked")
    MutableObjectIterator<X> input = (MutableObjectIterator<X>) this.inputs.get(index);
    return input;
}
Example #10
Source File: NonReusingBuildSecondReOpenableHashJoinIterator.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public NonReusingBuildSecondReOpenableHashJoinIterator(
        MutableObjectIterator<V1> firstInput,
        MutableObjectIterator<V2> secondInput,
        TypeSerializer<V1> serializer1,
        TypeComparator<V1> comparator1,
        TypeSerializer<V2> serializer2,
        TypeComparator<V2> comparator2,
        TypePairComparator<V1, V2> pairComparator,
        MemoryManager memManager,
        IOManager ioManager,
        AbstractInvokable ownerTask,
        double memoryFraction,
        boolean probeSideOuterJoin,
        boolean buildSideOuterJoin,
        boolean useBitmapFilters) throws MemoryAllocationException {

    super(firstInput, secondInput, serializer1, comparator1, serializer2,
            comparator2, pairComparator, memManager, ioManager, ownerTask, memoryFraction,
            probeSideOuterJoin, buildSideOuterJoin, useBitmapFilters);

    reopenHashTable = (ReOpenableMutableHashTable<V2, V1>) hashJoin;
}
Example #11
Source File: TempBarrier.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public void run() {
    final MutableObjectIterator<T> input = this.input;
    final TypeSerializer<T> serializer = this.serializer;
    final SpillingBuffer buffer = this.buffer;

    try {
        T record = serializer.createInstance();

        while (this.running && ((record = input.next(record)) != null)) {
            serializer.serialize(record, buffer);
        }

        TempBarrier.this.writingDone();
    }
    catch (Throwable t) {
        TempBarrier.this.setException(t);
    }
}
Example #12
Source File: BufferedKVExternalSorter.java From flink with Apache License 2.0 | 6 votes |
public MutableObjectIterator<Tuple2<BinaryRow, BinaryRow>> getKVIterator() throws IOException {
    // 1. merge if more than maxNumFile
    // merge channels until sufficient file handles are available
    List<ChannelWithMeta> channelIDs = this.channelIDs;
    while (!closed && channelIDs.size() > this.maxNumFileHandles) {
        channelIDs = merger.mergeChannelList(channelIDs);
    }

    // 2. final merge
    List<FileIOChannel> openChannels = new ArrayList<>();
    BinaryMergeIterator<Tuple2<BinaryRow, BinaryRow>> iterator =
            merger.getMergingIterator(channelIDs, openChannels);
    channelManager.addOpenChannels(openChannels);

    return iterator;
}
Example #13
Source File: ReusingBuildSecondReOpenableHashJoinIterator.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public ReusingBuildSecondReOpenableHashJoinIterator(
        MutableObjectIterator<V1> firstInput,
        MutableObjectIterator<V2> secondInput,
        TypeSerializer<V1> serializer1,
        TypeComparator<V1> comparator1,
        TypeSerializer<V2> serializer2,
        TypeComparator<V2> comparator2,
        TypePairComparator<V1, V2> pairComparator,
        MemoryManager memManager,
        IOManager ioManager,
        AbstractInvokable ownerTask,
        double memoryFraction,
        boolean probeSideOuterJoin,
        boolean buildSideOuterJoin,
        boolean useBitmapFilters) throws MemoryAllocationException {

    super(firstInput, secondInput, serializer1, comparator1, serializer2,
            comparator2, pairComparator, memManager, ioManager, ownerTask, memoryFraction,
            probeSideOuterJoin, buildSideOuterJoin, useBitmapFilters);

    reopenHashTable = (ReOpenableMutableHashTable<V2, V1>) hashJoin;
}
Example #14
Source File: LongHashTableTest.java From flink with Apache License 2.0 | 6 votes |
private int join(
        MyHashTable table,
        MutableObjectIterator<BinaryRow> buildInput,
        MutableObjectIterator<BinaryRow> probeInput) throws IOException {
    int count = 0;

    // build phase: insert all build-side rows into the hash table
    BinaryRow reuseBuildSizeRow = buildSideSerializer.createInstance();
    BinaryRow buildRow;
    while ((buildRow = buildInput.next(reuseBuildSizeRow)) != null) {
        table.putBuildRow(buildRow);
    }
    table.endBuild();

    // probe phase: look up every probe-side row and join the matching partitions
    BinaryRow probeRow = probeSideSerializer.createInstance();
    while ((probeRow = probeInput.next(probeRow)) != null) {
        if (table.tryProbe(probeRow)) {
            count += joinWithNextKey(table);
        }
    }

    while (table.nextMatching()) {
        count += joinWithNextKey(table);
    }
    return count;
}
Example #15
Source File: AbstractSortMergeOuterJoinIteratorITCase.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
private Map<Integer, Collection<String>> collectData(MutableObjectIterator<Tuple2<Integer, String>> iter)
        throws Exception {
    final Map<Integer, Collection<String>> map = new HashMap<>();

    Tuple2<Integer, String> pair = new Tuple2<>();

    while ((pair = iter.next(pair)) != null) {
        final Integer key = pair.getField(0);

        if (!map.containsKey(key)) {
            map.put(key, new ArrayList<String>());
        }

        Collection<String> values = map.get(key);
        final String value = pair.getField(1);
        values.add(value);
    }
    return map;
}
Example #16
Source File: Int2SortMergeJoinOperatorTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testFullOutJoin() throws Exception {
    int numKeys1 = 9;
    int numKeys2 = 10;
    int buildValsPerKey = 3;
    int probeValsPerKey = 10;

    MutableObjectIterator<BinaryRow> buildInput = new UniformBinaryRowGenerator(numKeys1, buildValsPerKey, true);
    MutableObjectIterator<BinaryRow> probeInput = new UniformBinaryRowGenerator(numKeys2, probeValsPerKey, true);

    buildJoin(buildInput, probeInput, FlinkJoinType.FULL, 280, numKeys2, -1);
}
Example #17
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testEntryIterator() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    int result = 0;
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
        result += lists[i].getKey();
    }

    MutableObjectIterator<IntList> iter = table.getEntryIterator();
    IntList target = new IntList();

    int sum = 0;
    while ((target = iter.next(target)) != null) {
        sum += target.getKey();
    }
    table.close();

    assertTrue(sum == result);
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #18
Source File: Int2HashJoinOperatorTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testBuildLeftAntiJoin() throws Exception {
    int numKeys1 = 10;
    int numKeys2 = 9;
    int buildValsPerKey = 10;
    int probeValsPerKey = 3;

    MutableObjectIterator<BinaryRow> buildInput = new UniformBinaryRowGenerator(numKeys1, buildValsPerKey, true);
    MutableObjectIterator<BinaryRow> probeInput = new UniformBinaryRowGenerator(numKeys2, probeValsPerKey, true);

    HashJoinType type = HashJoinType.BUILD_LEFT_ANTI;
    Object operator = newOperator(33 * 32 * 1024, type, false);
    joinAndAssert(operator, buildInput, probeInput, 10, 1, 45, true);
}
Example #19
Source File: UnilateralSortMerger.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
public UnilateralSortMerger(MemoryManager memoryManager, List<MemorySegment> memory,
        IOManager ioManager,
        MutableObjectIterator<E> input, AbstractInvokable parentTask,
        TypeSerializerFactory<E> serializerFactory, TypeComparator<E> comparator,
        int numSortBuffers, int maxNumFileHandles,
        float startSpillingFraction, boolean handleLargeRecords,
        boolean objectReuseEnabled) throws IOException {
    this(memoryManager, memory, ioManager, input, parentTask, serializerFactory, comparator,
            numSortBuffers, maxNumFileHandles, startSpillingFraction, false, handleLargeRecords,
            objectReuseEnabled);
}
Example #20
Source File: Int2HashJoinOperatorTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testBuildFirstHashRightOutJoin() throws Exception {
    int numKeys1 = 9;
    int numKeys2 = 10;
    int buildValsPerKey = 3;
    int probeValsPerKey = 10;

    MutableObjectIterator<BinaryRow> buildInput = new UniformBinaryRowGenerator(numKeys1, buildValsPerKey, true);
    MutableObjectIterator<BinaryRow> probeInput = new UniformBinaryRowGenerator(numKeys2, probeValsPerKey, true);

    buildJoin(buildInput, probeInput, false, true, true, 280, numKeys2, -1);
}
Example #21
Source File: UnilateralSortMerger.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Sets the result iterator. By setting the result iterator, all threads that are waiting for the result
 * iterator are notified and will obtain it.
 *
 * @param iterator The result iterator to set.
 */
protected final void setResultIterator(MutableObjectIterator<E> iterator) {
    synchronized (this.iteratorLock) {
        // set the result iterator only, if no exception has occurred
        if (this.iteratorException == null) {
            this.iterator = iterator;
            this.iteratorLock.notifyAll();
        }
    }
}
Example #22
Source File: ReusingBuildSecondHashJoinIterator.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override
public boolean callWithNextKey(FlatJoinFunction<V1, V2, O> matchFunction, Collector<O> collector)
        throws Exception {
    if (this.hashJoin.nextRecord()) {
        // we have a next record, get the iterators to the probe and build side values
        final MutableObjectIterator<V2> buildSideIterator = this.hashJoin.getBuildSideIterator();
        final V1 probeRecord = this.hashJoin.getCurrentProbeRecord();
        V2 nextBuildSideRecord = buildSideIterator.next(this.nextBuildSideObject);

        if (probeRecord != null && nextBuildSideRecord != null) {
            matchFunction.join(probeRecord, nextBuildSideRecord, collector);

            while (this.running && ((nextBuildSideRecord = buildSideIterator.next(nextBuildSideRecord)) != null)) {
                matchFunction.join(probeRecord, nextBuildSideRecord, collector);
            }
        } else {
            if (probeSideOuterJoin && probeRecord != null && nextBuildSideRecord == null) {
                matchFunction.join(probeRecord, null, collector);
            }

            if (buildSideOuterJoin && probeRecord == null && nextBuildSideRecord != null) {
                matchFunction.join(null, nextBuildSideRecord, collector);
                while (this.running && ((nextBuildSideRecord = buildSideIterator.next(nextBuildSideRecord)) != null)) {
                    matchFunction.join(null, nextBuildSideRecord, collector);
                }
            }
        }

        return true;
    } else {
        return false;
    }
}
Example #23
Source File: ReusingBuildSecondHashJoinIterator.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
public ReusingBuildSecondHashJoinIterator(
        MutableObjectIterator<V1> firstInput,
        MutableObjectIterator<V2> secondInput,
        TypeSerializer<V1> serializer1,
        TypeComparator<V1> comparator1,
        TypeSerializer<V2> serializer2,
        TypeComparator<V2> comparator2,
        TypePairComparator<V1, V2> pairComparator,
        MemoryManager memManager,
        IOManager ioManager,
        AbstractInvokable ownerTask,
        double memoryFraction,
        boolean probeSideOuterJoin,
        boolean buildSideOuterJoin,
        boolean useBitmapFilters) throws MemoryAllocationException {

    this.memManager = memManager;
    this.firstInput = firstInput;
    this.secondInput = secondInput;
    this.probeSideSerializer = serializer1;

    if (useBitmapFilters && probeSideOuterJoin) {
        throw new IllegalArgumentException("Bitmap filter may not be activated for joining with empty build side");
    }
    this.probeSideOuterJoin = probeSideOuterJoin;
    this.buildSideOuterJoin = buildSideOuterJoin;
    this.nextBuildSideObject = serializer2.createInstance();
    this.tempBuildSideRecord = serializer2.createInstance();

    this.hashJoin = getHashJoin(serializer2, comparator2, serializer1, comparator1, pairComparator,
            memManager, ioManager, ownerTask, memoryFraction, useBitmapFilters);
}
Example #24
Source File: Int2HashJoinOperatorTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testBuildSecondHashRightOutJoin() throws Exception {
    int numKeys1 = 9;
    int numKeys2 = 10;
    int buildValsPerKey = 3;
    int probeValsPerKey = 10;

    MutableObjectIterator<BinaryRow> buildInput = new UniformBinaryRowGenerator(numKeys1, buildValsPerKey, true);
    MutableObjectIterator<BinaryRow> probeInput = new UniformBinaryRowGenerator(numKeys2, probeValsPerKey, true);

    buildJoin(buildInput, probeInput, false, true, false,
            numKeys1 * buildValsPerKey * probeValsPerKey, numKeys2, -1);
}
Example #25
Source File: IterationHeadTask.java From flink with Apache License 2.0 | 5 votes |
private void streamSolutionSetToFinalOutput(CompactingHashTable<X> hashTable) throws IOException {
    final MutableObjectIterator<X> results = hashTable.getEntryIterator();
    final Collector<X> output = this.finalOutputCollector;
    X record = solutionTypeSerializer.getSerializer().createInstance();

    // drain the solution set via its entry iterator and emit every record
    while ((record = results.next(record)) != null) {
        output.collect(record);
    }
}
Example #26
Source File: SpillingResettableMutableObjectIterator.java From flink with Apache License 2.0 | 5 votes |
public SpillingResettableMutableObjectIterator(MutableObjectIterator<T> input, TypeSerializer<T> serializer,
        MemoryManager memoryManager, IOManager ioManager,
        int numPages, AbstractInvokable parentTask)
        throws MemoryAllocationException {
    this(input, serializer, memoryManager, ioManager,
            memoryManager.allocatePages(parentTask, numPages), true);
}
Example #27
Source File: NonReusingMergeInnerJoinIterator.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
public NonReusingMergeInnerJoinIterator(
        MutableObjectIterator<T1> input1,
        MutableObjectIterator<T2> input2,
        TypeSerializer<T1> serializer1, TypeComparator<T1> comparator1,
        TypeSerializer<T2> serializer2, TypeComparator<T2> comparator2,
        TypePairComparator<T1, T2> pairComparator,
        MemoryManager memoryManager,
        IOManager ioManager,
        int numMemoryPages,
        AbstractInvokable parentTask)
        throws MemoryAllocationException {

    super(input1, input2, serializer1, comparator1, serializer2, comparator2,
            pairComparator, memoryManager, ioManager, numMemoryPages, parentTask);
}
Example #28
Source File: BinaryExternalSorter.java From flink with Apache License 2.0 | 5 votes |
@Override
public MutableObjectIterator<BinaryRow> getIterator() throws InterruptedException {
    if (!writingDone) {
        writingDone = true;

        if (currWriteBuffer != null) {
            this.circularQueues.sort.add(currWriteBuffer);
        }

        // add the sentinel to notify the receivers that the work is done
        // send the EOF marker
        this.circularQueues.sort.add(EOF_MARKER);
        LOG.debug("Sending done.");
    }

    synchronized (this.iteratorLock) {
        // wait while both the iterator and the exception are not set
        while (this.iterator == null && this.iteratorException == null) {
            this.iteratorLock.wait();
        }

        if (this.iteratorException != null) {
            throw new RuntimeException("Error obtaining the sorted input: " + this.iteratorException.getMessage(),
                    this.iteratorException);
        } else {
            return this.iterator;
        }
    }
}
Example #29
Source File: ReusingKeyGroupedIterator.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Initializes the KeyGroupedIterator. It requires an iterator which returns its result
 * sorted by the key fields.
 *
 * @param iterator An iterator over records, which are sorted by the key fields, in any order.
 * @param serializer The serializer for the data type iterated over.
 * @param comparator The comparator for the data type iterated over.
 */
public ReusingKeyGroupedIterator(MutableObjectIterator<E> iterator, TypeSerializer<E> serializer,
        TypeComparator<E> comparator) {
    if (iterator == null || serializer == null || comparator == null) {
        throw new NullPointerException();
    }

    this.iterator = iterator;
    this.serializer = serializer;
    this.comparator = comparator;
    this.reuse = this.serializer.createInstance();
}
Example #30
Source File: BinaryHashTableTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testHashWithBuildSideOuterJoin1() throws Exception {
    final int numKeys = 20000;
    final int buildValsPerKey = 1;
    final int probeValsPerKey = 1;

    // create a build input that gives 40000 pairs with 1 values sharing the same key
    MutableObjectIterator<BinaryRow> buildInput = new UniformBinaryRowGenerator(2 * numKeys, buildValsPerKey, false);

    // create a probe input that gives 20000 pairs with 1 values sharing a key
    MutableObjectIterator<BinaryRow> probeInput = new UniformBinaryRowGenerator(numKeys, probeValsPerKey, true);

    MemoryManager memManager = new MemoryManager(35 * PAGE_SIZE, 1);
    // allocate the memory for the HashTable
    final BinaryHashTable table = new BinaryHashTable(
            conf, new Object(), this.buildSideSerializer, this.probeSideSerializer,
            new MyProjection(), new MyProjection(), memManager, 35 * PAGE_SIZE,
            ioManager, 24, 200000, true, HashJoinType.BUILD_OUTER, null, true,
            new boolean[]{true}, false);

    int numRecordsInJoinResult = join(table, buildInput, probeInput, true);
    Assert.assertEquals("Wrong number of records in join result.",
            2 * numKeys * buildValsPerKey * probeValsPerKey, numRecordsInJoinResult);

    table.close();
    table.free();
}