org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator Java Examples
The following examples show how to use org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator. They are drawn from the test suites of the flink and Flink-CEPplus projects; the source file and license are noted above each example.
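Before the full test cases below, here is a minimal, self-contained sketch of how the generator is typically driven. It is an illustration rather than code from the projects above, and it assumes the package layout visible in the examples (IntPair in org.apache.flink.runtime.operators.testutils.types, MutableObjectIterator in org.apache.flink.util) and the constructor arguments used throughout: number of keys, values per key, and a flag that controls how the key repetitions are ordered.

import org.apache.flink.runtime.operators.testutils.UniformIntPairGenerator;
import org.apache.flink.runtime.operators.testutils.types.IntPair;
import org.apache.flink.util.MutableObjectIterator;

public class UniformIntPairGeneratorSketch {

    public static void main(String[] args) throws Exception {
        // 5 keys with 2 values per key -> 10 pairs in total (assumed semantics of the first two arguments).
        MutableObjectIterator<IntPair> source = new UniformIntPairGenerator(5, 2, true);

        // The generator follows the MutableObjectIterator contract used in the tests below:
        // next(reuse) fills the passed record and returns null once the stream is exhausted.
        IntPair reuse = new IntPair();
        int count = 0;
        while (source.next(reuse) != null) {
            System.out.println(reuse.getKey() + " -> " + reuse.getValue());
            count++;
        }
        System.out.println("generated " + count + " pairs"); // expected: 10
    }
}

As in the tests below, the same reuse object is passed on every call, so the generator produces the whole stream without allocating new records.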
Example #1
Source File: FixedLengthRecordSorterTest.java From flink with Apache License 2.0
/**
 * The compare test creates a sorted stream, writes it to the buffer and
 * compares random elements. It expects that earlier elements are lower than later
 * ones.
 */
@Test
public void testCompare() throws Exception {
    final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, true);

    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    }
    while (sorter.write(record) && num < 3354624);

    // compare random elements
    Random rnd = new Random(SEED << 1);
    for (int i = 0; i < 2 * num; i++) {
        int pos1 = rnd.nextInt(num);
        int pos2 = rnd.nextInt(num);

        int cmp = sorter.compare(pos1, pos2);

        if (pos1 < pos2) {
            Assert.assertTrue(cmp <= 0);
        } else {
            Assert.assertTrue(cmp >= 0);
        }
    }

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
Example #2
Source File: FixedLengthRecordSorterTest.java From Flink-CEPplus with Apache License 2.0
/**
 * The compare test creates a sorted stream, writes it to the buffer and
 * compares random elements. It expects that earlier elements are lower than later
 * ones.
 */
@Test
public void testCompare() throws Exception {
    final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, true);

    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    }
    while (sorter.write(record) && num < 3354624);

    // compare random elements
    Random rnd = new Random(SEED << 1);
    for (int i = 0; i < 2 * num; i++) {
        int pos1 = rnd.nextInt(num);
        int pos2 = rnd.nextInt(num);

        int cmp = sorter.compare(pos1, pos2);

        if (pos1 < pos2) {
            Assert.assertTrue(cmp <= 0);
        } else {
            Assert.assertTrue(cmp >= 0);
        }
    }

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
Example #3
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void testBucketsNotFulfillSegment() throws Exception {
    final int NUM_KEYS = 10000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 30000 pairs with 3 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 100000 pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        // 33 is the minimum number of pages required to perform the hash join for these inputs
        memSegments = this.memManager.allocatePages(MEM_OWNER, 33);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // For FLINK-2545, a bucket buffer may not be completely filled with buckets: the buffer may hold 256 buckets,
    // for example, while the hash table assigns only 250 of them. The unused bytes can contain arbitrary data,
    // which would confuse the hash table if it failed to skip them. To mock this, put invalid bucket data
    // (partition=1, inMemory=true, count=-1) at the end of each buffer.
    for (MemorySegment segment : memSegments) {
        int newBucketOffset = segment.size() - 128;
        // initialize the header fields
        segment.put(newBucketOffset + 0, (byte) 0);
        segment.put(newBucketOffset + 1, (byte) 0);
        segment.putShort(newBucketOffset + 2, (short) -1);
        segment.putLong(newBucketOffset + 4, ~0x0L);
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example #4
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void validateSpillingDuringInsertionIntPair() throws IOException, MemoryAllocationException {
    final int NUM_BUILD_KEYS = 500000;
    final int NUM_BUILD_VALS = 1;
    final int NUM_PROBE_KEYS = 10;
    final int NUM_PROBE_VALS = 1;

    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_BUILD_KEYS, NUM_BUILD_VALS, false);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 85);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, new UniformIntPairGenerator(NUM_PROBE_KEYS, NUM_PROBE_VALS, true));

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    int expectedNumResults = (Math.min(NUM_PROBE_KEYS, NUM_BUILD_KEYS) * NUM_BUILD_VALS) * NUM_PROBE_VALS;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.", expectedNumResults, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example #5
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void testHashWithBuildSideOuterJoin1() throws Exception {
    final int NUM_KEYS = 20000;
    final int BUILD_VALS_PER_KEY = 1;
    final int PROBE_VALS_PER_KEY = 1;

    // create a build input that gives 40000 pairs with 1 value sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(2 * NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 20000 pairs with 1 value sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        // 33 is the minimum number of pages required to perform the hash join for these inputs
        memSegments = this.memManager.allocatePages(MEM_OWNER, 33);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput, true);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        2 * NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example #6
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void testHashWithBuildSideOuterJoin2() throws Exception {
    final int NUM_KEYS = 40000;
    final int BUILD_VALS_PER_KEY = 2;
    final int PROBE_VALS_PER_KEY = 1;

    // The key ranges of the probe and build sides overlap completely, so no unmatched build elements remain
    // after the probe phase; make sure the build-side outer join still works in this case.

    // create a build input that gives 80000 pairs with 2 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 40000 pairs with 1 value sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        // 33 is the minimum number of pages required to perform the hash join for these inputs
        memSegments = this.memManager.allocatePages(MEM_OWNER, 33);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput, true);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        IntPair next = buildSide.next(recordReuse);
        if (next == null && join.getCurrentProbeRecord() == null) {
            fail("Should not return join result that both probe and build element are null.");
        }
        while (next != null) {
            numRecordsInJoinResult++;
            next = buildSide.next(recordReuse);
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example #7
Source File: FixedLengthRecordSorterTest.java From flink with Apache License 2.0
@Test
public void testFlushFullMemoryPage() throws Exception {
    // Insert IntPair which would fill 2 memory pages.
    final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    }
    while (sorter.write(record) && num < NUM_RECORDS);

    FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
    BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
    final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

    sorter.writeToOutput(outputView, 0, NUM_RECORDS);

    this.memoryManager.release(outputView.close());

    BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
    final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
    final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator(readerInputView, dataBuffer, this.serializer);

    record = iterator.next(record);
    int i = 0;
    while (record != null) {
        Assert.assertEquals(i, record.getKey());
        record = iterator.next(record);
        i++;
    }

    Assert.assertEquals(NUM_RECORDS, i);

    this.memoryManager.release(dataBuffer);

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
Example #8
Source File: FixedLengthRecordSorterTest.java From flink with Apache License 2.0
@Test
public void testFlushPartialMemoryPage() throws Exception {
    // Insert IntPair which would fill 2 memory pages.
    final int NUM_RECORDS = 2 * MEMORY_PAGE_SIZE / 8;
    final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), 3);

    FixedLengthRecordSorter<IntPair> sorter = newSortBuffer(memory);
    UniformIntPairGenerator generator = new UniformIntPairGenerator(Integer.MAX_VALUE, 1, false);

    // write the records
    IntPair record = new IntPair();
    int num = -1;
    do {
        generator.next(record);
        num++;
    }
    while (sorter.write(record) && num < NUM_RECORDS);

    FileIOChannel.ID channelID = this.ioManager.createChannelEnumerator().next();
    BlockChannelWriter<MemorySegment> blockChannelWriter = this.ioManager.createBlockChannelWriter(channelID);
    final List<MemorySegment> writeBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelWriterOutputView outputView = new ChannelWriterOutputView(blockChannelWriter, writeBuffer, writeBuffer.get(0).size());

    sorter.writeToOutput(outputView, 1, NUM_RECORDS - 1);

    this.memoryManager.release(outputView.close());

    BlockChannelReader<MemorySegment> blockChannelReader = this.ioManager.createBlockChannelReader(channelID);
    final List<MemorySegment> readBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputView readerInputView = new ChannelReaderInputView(blockChannelReader, readBuffer, false);
    final List<MemorySegment> dataBuffer = this.memoryManager.allocatePages(new DummyInvokable(), 3);
    ChannelReaderInputViewIterator<IntPair> iterator = new ChannelReaderInputViewIterator(readerInputView, dataBuffer, this.serializer);

    record = iterator.next(record);
    int i = 1;
    while (record != null) {
        Assert.assertEquals(i, record.getKey());
        record = iterator.next(record);
        i++;
    }

    Assert.assertEquals(NUM_RECORDS, i);

    this.memoryManager.release(dataBuffer);

    // release the memory occupied by the buffers
    sorter.dispose();
    this.memoryManager.release(memory);
}
Example #9
Source File: HashTablePerformanceComparison.java From flink with Apache License 2.0
@Test
public void testCompactingHashMapPerformance() {
    try {
        final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;

        MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

        long start;
        long end;

        long first = System.currentTimeMillis();

        System.out.println("Creating and filling CompactingHashMap...");
        start = System.currentTimeMillis();
        AbstractMutableHashTable<IntPair> table = new CompactingHashTable<IntPair>(serializer, comparator, getMemory(NUM_MEM_PAGES, PAGE_SIZE));
        table.open();

        IntPair target = new IntPair();
        while (buildInput.next(target) != null) {
            table.insert(target);
        }
        end = System.currentTimeMillis();
        System.out.println("HashMap ready. Time: " + (end - start) + " ms");

        System.out.println("Starting first probing run...");
        start = System.currentTimeMillis();

        AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(comparator, pairComparator);
        IntPair temp = new IntPair();
        while (probeTester.next(target) != null) {
            assertNotNull(prober.getMatchFor(target, temp));
            assertEquals(temp.getValue(), target.getValue());
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        System.out.println("Starting update...");
        start = System.currentTimeMillis();
        while (updater.next(target) != null) {
            target.setValue(target.getValue() + 1);
            table.insertOrReplaceRecord(target);
        }
        end = System.currentTimeMillis();
        System.out.println("Update done. Time: " + (end - start) + " ms");

        System.out.println("Starting second probing run...");
        start = System.currentTimeMillis();
        while (updateTester.next(target) != null) {
            assertNotNull(prober.getMatchFor(target, temp));
            assertEquals(target.getValue() + 1, temp.getValue());
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        table.close();
        end = System.currentTimeMillis();
        System.out.println("Overall time: " + (end - first) + " ms");

        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example #10
Source File: HashTablePerformanceComparison.java From flink with Apache License 2.0
@Test
public void testMutableHashMapPerformance() {
    try (IOManager ioManager = new IOManagerAsync()) {
        final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;

        MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(0, 1, false);
        MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

        long start;
        long end;

        long first = System.currentTimeMillis();

        System.out.println("Creating and filling MutableHashMap...");
        start = System.currentTimeMillis();
        MutableHashTable<IntPair, IntPair> table = new MutableHashTable<IntPair, IntPair>(serializer, serializer, comparator, comparator, pairComparator,
            getMemory(NUM_MEM_PAGES, PAGE_SIZE), ioManager);
        table.open(buildInput, probeInput);
        end = System.currentTimeMillis();
        System.out.println("HashMap ready. Time: " + (end - start) + " ms");

        System.out.println("Starting first probing run...");
        start = System.currentTimeMillis();

        IntPair compare = new IntPair();
        HashBucketIterator<IntPair, IntPair> iter;
        IntPair target = new IntPair();
        while (probeTester.next(compare) != null) {
            iter = table.getMatchesFor(compare);
            iter.next(target);
            assertEquals(target.getKey(), compare.getKey());
            assertEquals(target.getValue(), compare.getValue());
            assertTrue(iter.next(target) == null);
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        System.out.println("Starting update...");
        start = System.currentTimeMillis();
        while (updater.next(compare) != null) {
            compare.setValue(compare.getValue() + 1);
            iter = table.getMatchesFor(compare);
            iter.next(target);
            iter.writeBack(compare);
            //assertFalse(iter.next(target));
        }
        end = System.currentTimeMillis();
        System.out.println("Update done. Time: " + (end - start) + " ms");

        System.out.println("Starting second probing run...");
        start = System.currentTimeMillis();
        while (updateTester.next(compare) != null) {
            compare.setValue(compare.getValue() + 1);
            iter = table.getMatchesFor(compare);
            iter.next(target);
            assertEquals(target.getKey(), compare.getKey());
            assertEquals(target.getValue(), compare.getValue());
            assertTrue(iter.next(target) == null);
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        table.close();
        end = System.currentTimeMillis();
        System.out.println("Overall time: " + (end - first) + " ms");

        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreedMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example #11
Source File: HashTablePerformanceComparison.java From flink with Apache License 2.0
@Test
public void testInPlaceMutableHashTablePerformance() {
    try {
        final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;

        MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

        long start;
        long end;

        long first = System.currentTimeMillis();

        System.out.println("Creating and filling InPlaceMutableHashTable...");
        start = System.currentTimeMillis();
        InPlaceMutableHashTable<IntPair> table = new InPlaceMutableHashTable<>(serializer, comparator, getMemory(NUM_MEM_PAGES, PAGE_SIZE));
        table.open();

        IntPair target = new IntPair();
        while (buildInput.next(target) != null) {
            table.insert(target);
        }
        end = System.currentTimeMillis();
        System.out.println("HashMap ready. Time: " + (end - start) + " ms");

        System.out.println("Starting first probing run...");
        start = System.currentTimeMillis();

        AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(comparator, pairComparator);
        IntPair temp = new IntPair();
        while (probeTester.next(target) != null) {
            assertNotNull(prober.getMatchFor(target, temp));
            assertEquals(temp.getValue(), target.getValue());
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        System.out.println("Starting update...");
        start = System.currentTimeMillis();
        while (updater.next(target) != null) {
            target.setValue(target.getValue() + 1);
            table.insertOrReplaceRecord(target);
        }
        end = System.currentTimeMillis();
        System.out.println("Update done. Time: " + (end - start) + " ms");

        System.out.println("Starting second probing run...");
        start = System.currentTimeMillis();
        while (updateTester.next(target) != null) {
            assertNotNull(prober.getMatchFor(target, temp));
            assertEquals(target.getValue() + 1, temp.getValue());
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        table.close();
        end = System.currentTimeMillis();
        System.out.println("Overall time: " + (end - first) + " ms");

        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example #12
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void testInMemoryMutableHashTableIntPair() throws IOException {
    final int NUM_KEYS = 100000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 300000 pairs with 3 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 1000000 pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 896);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();

    // ----------------------------------------------------------------------------------------

    this.memManager.release(join.getFreedMemory());
}
Example #13
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void testSpillingHashJoinOneRecursionPerformanceIntPair() throws IOException {
    final int NUM_KEYS = 1000000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 3 million pairs with 3 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 10 million pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 896);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();

    // ----------------------------------------------------------------------------------------

    this.memManager.release(join.getFreedMemory());
}
Example #14
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void testFailingHashJoinTooManyRecursionsIntPair() throws IOException {
    // the following two values are known to have a hash-code collision on the first recursion level.
    // we use them to make sure one partition grows over-proportionally large
    final int REPEATED_VALUE_1 = 40559;
    final int REPEATED_VALUE_2 = 92882;
    final int REPEATED_VALUE_COUNT = 3000000;

    final int NUM_KEYS = 1000000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 3 million pairs with 3 values sharing the same key,
    // plus 6 million pairs spread over two colliding keys
    MutableObjectIterator<IntPair> build1 = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);
    MutableObjectIterator<IntPair> build2 = new ConstantsIntPairsIterator(REPEATED_VALUE_1, 17, REPEATED_VALUE_COUNT);
    MutableObjectIterator<IntPair> build3 = new ConstantsIntPairsIterator(REPEATED_VALUE_2, 23, REPEATED_VALUE_COUNT);
    List<MutableObjectIterator<IntPair>> builds = new ArrayList<MutableObjectIterator<IntPair>>();
    builds.add(build1);
    builds.add(build2);
    builds.add(build3);
    MutableObjectIterator<IntPair> buildInput = new UnionIterator<IntPair>(builds);

    // create a probe input that gives 10 million pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probe1 = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);
    MutableObjectIterator<IntPair> probe2 = new ConstantsIntPairsIterator(REPEATED_VALUE_1, 17, REPEATED_VALUE_COUNT);
    MutableObjectIterator<IntPair> probe3 = new ConstantsIntPairsIterator(REPEATED_VALUE_2, 23, REPEATED_VALUE_COUNT);
    List<MutableObjectIterator<IntPair>> probes = new ArrayList<MutableObjectIterator<IntPair>>();
    probes.add(probe1);
    probes.add(probe2);
    probes.add(probe3);
    MutableObjectIterator<IntPair> probeInput = new UnionIterator<IntPair>(probes);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 896);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput);

    final IntPair recordReuse = new IntPair();

    try {
        while (join.nextRecord()) {
            MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
            if (buildSide.next(recordReuse) == null) {
                fail("No build side values found for a probe key.");
            }
            while (buildSide.next(recordReuse) != null);
        }

        fail("Hash Join must have failed due to too many recursions.");
    } catch (Exception ex) {
        // expected
    }

    join.close();

    // ----------------------------------------------------------------------------------------

    this.memManager.release(join.getFreedMemory());
}
Example #15
Source File: HashTableITCase.java From flink with Apache License 2.0
@Test
public void testSparseProbeSpillingIntPair() throws IOException, MemoryAllocationException {
    final int NUM_BUILD_KEYS = 1000000;
    final int NUM_BUILD_VALS = 1;
    final int NUM_PROBE_KEYS = 20;
    final int NUM_PROBE_VALS = 1;

    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_BUILD_KEYS, NUM_BUILD_VALS, false);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 128);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, new UniformIntPairGenerator(NUM_PROBE_KEYS, NUM_PROBE_VALS, true));

    int expectedNumResults = (Math.min(NUM_PROBE_KEYS, NUM_BUILD_KEYS) * NUM_BUILD_VALS) * NUM_PROBE_VALS;

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.", expectedNumResults, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example #16
Source File: HashTableITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testHashWithBuildSideOuterJoin2() throws Exception {
    final int NUM_KEYS = 40000;
    final int BUILD_VALS_PER_KEY = 2;
    final int PROBE_VALS_PER_KEY = 1;

    // The key ranges of the probe and build sides overlap completely, so no unmatched build elements remain
    // after the probe phase; make sure the build-side outer join still works in this case.

    // create a build input that gives 80000 pairs with 2 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 40000 pairs with 1 value sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        // 33 is the minimum number of pages required to perform the hash join for these inputs
        memSegments = this.memManager.allocatePages(MEM_OWNER, 33);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput, true);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        IntPair next = buildSide.next(recordReuse);
        if (next == null && join.getCurrentProbeRecord() == null) {
            fail("Should not return join result that both probe and build element are null.");
        }
        while (next != null) {
            numRecordsInJoinResult++;
            next = buildSide.next(recordReuse);
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example #17
Source File: HashTablePerformanceComparison.java From Flink-CEPplus with Apache License 2.0
@Test
public void testMutableHashMapPerformance() {
    final IOManager ioManager = new IOManagerAsync();
    try {
        final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;

        MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(0, 1, false);
        MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

        long start;
        long end;

        long first = System.currentTimeMillis();

        System.out.println("Creating and filling MutableHashMap...");
        start = System.currentTimeMillis();
        MutableHashTable<IntPair, IntPair> table = new MutableHashTable<IntPair, IntPair>(serializer, serializer, comparator, comparator, pairComparator,
            getMemory(NUM_MEM_PAGES, PAGE_SIZE), ioManager);
        table.open(buildInput, probeInput);
        end = System.currentTimeMillis();
        System.out.println("HashMap ready. Time: " + (end - start) + " ms");

        System.out.println("Starting first probing run...");
        start = System.currentTimeMillis();

        IntPair compare = new IntPair();
        HashBucketIterator<IntPair, IntPair> iter;
        IntPair target = new IntPair();
        while (probeTester.next(compare) != null) {
            iter = table.getMatchesFor(compare);
            iter.next(target);
            assertEquals(target.getKey(), compare.getKey());
            assertEquals(target.getValue(), compare.getValue());
            assertTrue(iter.next(target) == null);
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        System.out.println("Starting update...");
        start = System.currentTimeMillis();
        while (updater.next(compare) != null) {
            compare.setValue(compare.getValue() + 1);
            iter = table.getMatchesFor(compare);
            iter.next(target);
            iter.writeBack(compare);
            //assertFalse(iter.next(target));
        }
        end = System.currentTimeMillis();
        System.out.println("Update done. Time: " + (end - start) + " ms");

        System.out.println("Starting second probing run...");
        start = System.currentTimeMillis();
        while (updateTester.next(compare) != null) {
            compare.setValue(compare.getValue() + 1);
            iter = table.getMatchesFor(compare);
            iter.next(target);
            assertEquals(target.getKey(), compare.getKey());
            assertEquals(target.getValue(), compare.getValue());
            assertTrue(iter.next(target) == null);
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        table.close();
        end = System.currentTimeMillis();
        System.out.println("Overall time: " + (end - first) + " ms");

        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreedMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    } finally {
        ioManager.shutdown();
    }
}
Example #18
Source File: HashTablePerformanceComparison.java From Flink-CEPplus with Apache License 2.0
@Test
public void testInPlaceMutableHashTablePerformance() {
    try {
        final int NUM_MEM_PAGES = SIZE * NUM_PAIRS / PAGE_SIZE;

        MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> probeTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updater = new UniformIntPairGenerator(NUM_PAIRS, 1, false);
        MutableObjectIterator<IntPair> updateTester = new UniformIntPairGenerator(NUM_PAIRS, 1, false);

        long start;
        long end;

        long first = System.currentTimeMillis();

        System.out.println("Creating and filling InPlaceMutableHashTable...");
        start = System.currentTimeMillis();
        InPlaceMutableHashTable<IntPair> table = new InPlaceMutableHashTable<>(serializer, comparator, getMemory(NUM_MEM_PAGES, PAGE_SIZE));
        table.open();

        IntPair target = new IntPair();
        while (buildInput.next(target) != null) {
            table.insert(target);
        }
        end = System.currentTimeMillis();
        System.out.println("HashMap ready. Time: " + (end - start) + " ms");

        System.out.println("Starting first probing run...");
        start = System.currentTimeMillis();

        AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(comparator, pairComparator);
        IntPair temp = new IntPair();
        while (probeTester.next(target) != null) {
            assertNotNull(prober.getMatchFor(target, temp));
            assertEquals(temp.getValue(), target.getValue());
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        System.out.println("Starting update...");
        start = System.currentTimeMillis();
        while (updater.next(target) != null) {
            target.setValue(target.getValue() + 1);
            table.insertOrReplaceRecord(target);
        }
        end = System.currentTimeMillis();
        System.out.println("Update done. Time: " + (end - start) + " ms");

        System.out.println("Starting second probing run...");
        start = System.currentTimeMillis();
        while (updateTester.next(target) != null) {
            assertNotNull(prober.getMatchFor(target, temp));
            assertEquals(target.getValue() + 1, temp.getValue());
        }
        end = System.currentTimeMillis();
        System.out.println("Probing done. Time: " + (end - start) + " ms");

        table.close();
        end = System.currentTimeMillis();
        System.out.println("Overall time: " + (end - first) + " ms");

        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example #19
Source File: HashTableITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testInMemoryMutableHashTableIntPair() throws IOException {
    final int NUM_KEYS = 100000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 300000 pairs with 3 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 1000000 pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 896);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();

    // ----------------------------------------------------------------------------------------

    this.memManager.release(join.getFreedMemory());
}
Example #20
Source File: HashTableITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSpillingHashJoinOneRecursionPerformanceIntPair() throws IOException {
    final int NUM_KEYS = 1000000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 3 million pairs with 3 values sharing the same key
    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);

    // create a probe input that gives 10 million pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probeInput = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 896);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput);

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.",
        NUM_KEYS * BUILD_VALS_PER_KEY * PROBE_VALS_PER_KEY, numRecordsInJoinResult);

    join.close();

    // ----------------------------------------------------------------------------------------

    this.memManager.release(join.getFreedMemory());
}
Example #21
Source File: HashTableITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testFailingHashJoinTooManyRecursionsIntPair() throws IOException {
    // the following two values are known to have a hash-code collision on the first recursion level.
    // we use them to make sure one partition grows over-proportionally large
    final int REPEATED_VALUE_1 = 40559;
    final int REPEATED_VALUE_2 = 92882;
    final int REPEATED_VALUE_COUNT = 3000000;

    final int NUM_KEYS = 1000000;
    final int BUILD_VALS_PER_KEY = 3;
    final int PROBE_VALS_PER_KEY = 10;

    // create a build input that gives 3 million pairs with 3 values sharing the same key,
    // plus 6 million pairs spread over two colliding keys
    MutableObjectIterator<IntPair> build1 = new UniformIntPairGenerator(NUM_KEYS, BUILD_VALS_PER_KEY, false);
    MutableObjectIterator<IntPair> build2 = new ConstantsIntPairsIterator(REPEATED_VALUE_1, 17, REPEATED_VALUE_COUNT);
    MutableObjectIterator<IntPair> build3 = new ConstantsIntPairsIterator(REPEATED_VALUE_2, 23, REPEATED_VALUE_COUNT);
    List<MutableObjectIterator<IntPair>> builds = new ArrayList<MutableObjectIterator<IntPair>>();
    builds.add(build1);
    builds.add(build2);
    builds.add(build3);
    MutableObjectIterator<IntPair> buildInput = new UnionIterator<IntPair>(builds);

    // create a probe input that gives 10 million pairs with 10 values sharing a key
    MutableObjectIterator<IntPair> probe1 = new UniformIntPairGenerator(NUM_KEYS, PROBE_VALS_PER_KEY, true);
    MutableObjectIterator<IntPair> probe2 = new ConstantsIntPairsIterator(REPEATED_VALUE_1, 17, REPEATED_VALUE_COUNT);
    MutableObjectIterator<IntPair> probe3 = new ConstantsIntPairsIterator(REPEATED_VALUE_2, 23, REPEATED_VALUE_COUNT);
    List<MutableObjectIterator<IntPair>> probes = new ArrayList<MutableObjectIterator<IntPair>>();
    probes.add(probe1);
    probes.add(probe2);
    probes.add(probe3);
    MutableObjectIterator<IntPair> probeInput = new UnionIterator<IntPair>(probes);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 896);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    // ----------------------------------------------------------------------------------------

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, probeInput);

    final IntPair recordReuse = new IntPair();

    try {
        while (join.nextRecord()) {
            MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
            if (buildSide.next(recordReuse) == null) {
                fail("No build side values found for a probe key.");
            }
            while (buildSide.next(recordReuse) != null);
        }

        fail("Hash Join must have failed due to too many recursions.");
    } catch (Exception ex) {
        // expected
    }

    join.close();

    // ----------------------------------------------------------------------------------------

    this.memManager.release(join.getFreedMemory());
}
Example #22
Source File: HashTableITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSparseProbeSpillingIntPair() throws IOException, MemoryAllocationException {
    final int NUM_BUILD_KEYS = 1000000;
    final int NUM_BUILD_VALS = 1;
    final int NUM_PROBE_KEYS = 20;
    final int NUM_PROBE_VALS = 1;

    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_BUILD_KEYS, NUM_BUILD_VALS, false);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 128);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, new UniformIntPairGenerator(NUM_PROBE_KEYS, NUM_PROBE_VALS, true));

    int expectedNumResults = (Math.min(NUM_PROBE_KEYS, NUM_BUILD_KEYS) * NUM_BUILD_VALS) * NUM_PROBE_VALS;

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.", expectedNumResults, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}
Example #23
Source File: HashTableITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void validateSpillingDuringInsertionIntPair() throws IOException, MemoryAllocationException {
    final int NUM_BUILD_KEYS = 500000;
    final int NUM_BUILD_VALS = 1;
    final int NUM_PROBE_KEYS = 10;
    final int NUM_PROBE_VALS = 1;

    MutableObjectIterator<IntPair> buildInput = new UniformIntPairGenerator(NUM_BUILD_KEYS, NUM_BUILD_VALS, false);

    // allocate the memory for the HashTable
    List<MemorySegment> memSegments;
    try {
        memSegments = this.memManager.allocatePages(MEM_OWNER, 85);
    } catch (MemoryAllocationException maex) {
        fail("Memory for the Join could not be provided.");
        return;
    }

    final MutableHashTable<IntPair, IntPair> join = new MutableHashTable<IntPair, IntPair>(
        this.pairBuildSideAccesssor, this.pairProbeSideAccesssor,
        this.pairBuildSideComparator, this.pairProbeSideComparator, this.pairComparator,
        memSegments, ioManager);
    join.open(buildInput, new UniformIntPairGenerator(NUM_PROBE_KEYS, NUM_PROBE_VALS, true));

    final IntPair recordReuse = new IntPair();
    int numRecordsInJoinResult = 0;

    int expectedNumResults = (Math.min(NUM_PROBE_KEYS, NUM_BUILD_KEYS) * NUM_BUILD_VALS) * NUM_PROBE_VALS;

    while (join.nextRecord()) {
        MutableObjectIterator<IntPair> buildSide = join.getBuildSideIterator();
        while (buildSide.next(recordReuse) != null) {
            numRecordsInJoinResult++;
        }
    }
    Assert.assertEquals("Wrong number of records in join result.", expectedNumResults, numRecordsInJoinResult);

    join.close();
    this.memManager.release(join.getFreedMemory());
}