Java Code Examples for org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.ValueMode#FIX_LENGTH
The following examples show how to use org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.ValueMode#FIX_LENGTH. The source file, originating project, and license are noted above each example.
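Before the examples, here is a minimal, self-contained sketch of how a TupleGenerator configured with ValueMode.FIX_LENGTH is typically driven. It is based only on the constructor and iterator usage that appears in the examples below; the seed, key range, value length, and the class name FixLengthValueSketch are illustrative placeholders, not values taken from any Flink test.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.operators.testutils.TestData;
import org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.KeyMode;
import org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.ValueMode;
import org.apache.flink.util.MutableObjectIterator;

public class FixLengthValueSketch {

	public static void main(String[] args) throws Exception {
		// Illustrative parameters (assumed, not taken from any Flink test):
		final long seed = 12345L;
		final int keyMax = 100;       // keys fall in roughly [1, keyMax]
		final int valueLength = 20;   // with FIX_LENGTH, every String value has this length

		// ValueMode.FIX_LENGTH makes the generator emit constant-length String values,
		// so every Tuple2<Integer, String> record has the same serialized size.
		TestData.TupleGenerator generator =
				new TestData.TupleGenerator(seed, keyMax, valueLength, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

		// Wrap the generator in a bounded MutableObjectIterator, as the sorter
		// and hash-table tests below do, and pull ten records from it.
		MutableObjectIterator<Tuple2<Integer, String>> source =
				new TestData.TupleGeneratorIterator(generator, 10);

		Tuple2<Integer, String> reuse = new Tuple2<>();
		while ((reuse = source.next(reuse)) != null) {
			System.out.println(reuse.f0 + " -> " + reuse.f1);
		}
	}
}

The fixed-length mode matters in tests such as testReset below, whose logic relies on every record having the same size so that the same number of records can be written before and after the sorter is reset.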
Example 1
Source File: ReOpenableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
/**
 * Verify proper operation if the build side is spilled to disk.
 */
@Test
public void testDoubleProbeSpilling() {
	int buildSize = 1000;
	int probeSize = 1000;
	try {
		TupleGenerator bgen = new TupleGenerator(SEED1, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);
		TupleGenerator pgen = new TupleGenerator(SEED2, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);

		final TupleGeneratorIterator buildInput = new TupleGeneratorIterator(bgen, buildSize);
		final TupleGeneratorIterator probeInput = new TupleGeneratorIterator(pgen, probeSize);

		doTest(buildInput, probeInput, bgen, pgen);
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("An exception occurred during the test: " + e.getMessage());
	}
}
Example 2
Source File: ReOpenableHashTableTestBase.java From flink with Apache License 2.0
/**
 * This test case verifies that hybrid hash join is able to handle multiple probe phases
 * when the build side fits completely into memory.
 */
@Test
public void testDoubleProbeInMemory() {
	int buildSize = 1000;
	int probeSize = 1000;
	try {
		TupleGenerator bgen = new TupleGenerator(SEED1, 0, 28, KeyMode.SORTED, ValueMode.FIX_LENGTH);
		TupleGenerator pgen = new TupleGenerator(SEED2, 0, 28, KeyMode.SORTED, ValueMode.FIX_LENGTH);

		final TupleGeneratorIterator buildInput = new TupleGeneratorIterator(bgen, buildSize);
		final TupleGeneratorIterator probeInput = new TupleGeneratorIterator(pgen, probeSize);

		doTest(buildInput, probeInput, bgen, pgen);
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("An exception occurred during the test: " + e.getMessage());
	}
}
Example 3
Source File: ReOpenableHashTableTestBase.java From flink with Apache License 2.0
/**
 * Verify proper operation if the build side is spilled to disk.
 */
@Test
public void testDoubleProbeSpilling() {
	int buildSize = 1000;
	int probeSize = 1000;
	try {
		TupleGenerator bgen = new TupleGenerator(SEED1, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);
		TupleGenerator pgen = new TupleGenerator(SEED2, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);

		final TupleGeneratorIterator buildInput = new TupleGeneratorIterator(bgen, buildSize);
		final TupleGeneratorIterator probeInput = new TupleGeneratorIterator(pgen, probeSize);

		doTest(buildInput, probeInput, bgen, pgen);
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("An exception occurred during the test: " + e.getMessage());
	}
}
Example 4
Source File: ReOpenableHashTableTestBase.java From flink with Apache License 2.0
/**
 * Test behavior with overflow buckets (Overflow buckets must be initialized correctly
 * if the input is reopened again)
 */
@Test
public void testOverflow() {
	int buildSize = 1000;
	int probeSize = 1000;
	try {
		TupleGenerator bgen = new TupleGenerator(SEED1, 200, 1024, KeyMode.RANDOM, ValueMode.FIX_LENGTH);
		TupleGenerator pgen = new TupleGenerator(SEED2, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);

		final TupleGeneratorIterator buildInput = new TupleGeneratorIterator(bgen, buildSize);
		final TupleGeneratorIterator probeInput = new TupleGeneratorIterator(pgen, probeSize);

		doTest(buildInput, probeInput, bgen, pgen);
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("An exception occurred during the test: " + e.getMessage());
	}
}
Example 5
Source File: ReOpenableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
/**
 * This test case verifies that hybrid hash join is able to handle multiple probe phases
 * when the build side fits completely into memory.
 */
@Test
public void testDoubleProbeInMemory() {
	int buildSize = 1000;
	int probeSize = 1000;
	try {
		TupleGenerator bgen = new TupleGenerator(SEED1, 0, 28, KeyMode.SORTED, ValueMode.FIX_LENGTH);
		TupleGenerator pgen = new TupleGenerator(SEED2, 0, 28, KeyMode.SORTED, ValueMode.FIX_LENGTH);

		final TupleGeneratorIterator buildInput = new TupleGeneratorIterator(bgen, buildSize);
		final TupleGeneratorIterator probeInput = new TupleGeneratorIterator(pgen, probeSize);

		doTest(buildInput, probeInput, bgen, pgen);
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("An exception occurred during the test: " + e.getMessage());
	}
}
Example 6
Source File: ReOpenableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
/**
 * Test behavior with overflow buckets (Overflow buckets must be initialized correctly
 * if the input is reopened again)
 */
@Test
public void testOverflow() {
	int buildSize = 1000;
	int probeSize = 1000;
	try {
		TupleGenerator bgen = new TupleGenerator(SEED1, 200, 1024, KeyMode.RANDOM, ValueMode.FIX_LENGTH);
		TupleGenerator pgen = new TupleGenerator(SEED2, 0, 1024, KeyMode.SORTED, ValueMode.FIX_LENGTH);

		final TupleGeneratorIterator buildInput = new TupleGeneratorIterator(bgen, buildSize);
		final TupleGeneratorIterator probeInput = new TupleGeneratorIterator(pgen, probeSize);

		doTest(buildInput, probeInput, bgen, pgen);
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail("An exception occurred during the test: " + e.getMessage());
	}
}
Example 7
Source File: NormalizedKeySorterTest.java From flink with Apache License 2.0
@Test
public void testReset() throws Exception {
	final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

	NormalizedKeySorter<Tuple2<Integer, String>> sorter = newSortBuffer(memory);
	TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

	// write the buffer full with the first set of records
	Tuple2<Integer, String> record = new Tuple2<>();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record));

	sorter.reset();

	// write a second sequence of records. since the values are of fixed length, we must be able to write an equal number
	generator = new TestData.TupleGenerator(SEED2, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

	// write the buffer full with the second set of records
	int num2 = -1;
	do {
		generator.next(record);
		num2++;
	}
	while (sorter.write(record));

	Assert.assertEquals("The number of records written after the reset was not the same as before.", num, num2);

	// re-read the records
	generator.reset();
	Tuple2<Integer, String> readTarget = new Tuple2<>();

	int i = 0;
	while (i < num) {
		generator.next(record);
		readTarget = sorter.getRecord(readTarget, i++);

		int rk = readTarget.f0;
		int gk = record.f0;

		String rv = readTarget.f1;
		String gv = record.f1;

		Assert.assertEquals("The re-read key is wrong", gk, rk);
		Assert.assertEquals("The re-read value is wrong", gv, rv);
	}

	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
Example 8
Source File: NormalizedKeySorterTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSortLongStringKeys() throws Exception {
	final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

	@SuppressWarnings("unchecked")
	TypeComparator<Tuple2<Integer, String>> accessors = TestData.getIntStringTupleTypeInfo().createComparator(new int[]{1}, new boolean[]{true}, 0, null);
	NormalizedKeySorter<Tuple2<Integer, String>> sorter = new NormalizedKeySorter<>(TestData.getIntStringTupleSerializer(), accessors, memory);

	TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

	// write the records
	Tuple2<Integer, String> record = new Tuple2<>();
	do {
		generator.next(record);
	}
	while (sorter.write(record));

	QuickSort qs = new QuickSort();
	qs.sort(sorter);

	MutableObjectIterator<Tuple2<Integer, String>> iter = sorter.getIterator();
	Tuple2<Integer, String> readTarget = new Tuple2<>();

	iter.next(readTarget);
	String last = readTarget.f1;

	while ((readTarget = iter.next(readTarget)) != null) {
		String current = readTarget.f1;

		final int cmp = last.compareTo(current);
		if (cmp > 0) {
			Assert.fail("Next value is not larger or equal to previous value.");
		}

		last = current;
	}

	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
Example 9
Source File: NormalizedKeySorterTest.java From flink with Apache License 2.0
@Test
public void testSortShortStringKeys() throws Exception {
	final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

	@SuppressWarnings("unchecked")
	TypeComparator<Tuple2<Integer, String>> accessors = TestData.getIntStringTupleTypeInfo().createComparator(new int[]{1}, new boolean[]{true}, 0, null);
	NormalizedKeySorter<Tuple2<Integer, String>> sorter = new NormalizedKeySorter<>(TestData.getIntStringTupleSerializer(), accessors, memory);

	TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, 5, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

	// write the records
	Tuple2<Integer, String> record = new Tuple2<>();
	do {
		generator.next(record);
	}
	while (sorter.write(record));

	QuickSort qs = new QuickSort();
	qs.sort(sorter);

	MutableObjectIterator<Tuple2<Integer, String>> iter = sorter.getIterator();
	Tuple2<Integer, String> readTarget = new Tuple2<>();

	iter.next(readTarget);
	String last = readTarget.f1;

	while ((readTarget = iter.next(readTarget)) != null) {
		String current = readTarget.f1;

		final int cmp = last.compareTo(current);
		if (cmp > 0) {
			Assert.fail("Next value is not larger or equal to previous value.");
		}

		last = current;
	}

	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
Example 10
Source File: NormalizedKeySorterTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testReset() throws Exception {
	final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

	NormalizedKeySorter<Tuple2<Integer, String>> sorter = newSortBuffer(memory);
	TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

	// write the buffer full with the first set of records
	Tuple2<Integer, String> record = new Tuple2<>();
	int num = -1;
	do {
		generator.next(record);
		num++;
	}
	while (sorter.write(record));

	sorter.reset();

	// write a second sequence of records. since the values are of fixed length, we must be able to write an equal number
	generator = new TestData.TupleGenerator(SEED2, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

	// write the buffer full with the second set of records
	int num2 = -1;
	do {
		generator.next(record);
		num2++;
	}
	while (sorter.write(record));

	Assert.assertEquals("The number of records written after the reset was not the same as before.", num, num2);

	// re-read the records
	generator.reset();
	Tuple2<Integer, String> readTarget = new Tuple2<>();

	int i = 0;
	while (i < num) {
		generator.next(record);
		readTarget = sorter.getRecord(readTarget, i++);

		int rk = readTarget.f0;
		int gk = record.f0;

		String rv = readTarget.f1;
		String gv = record.f1;

		Assert.assertEquals("The re-read key is wrong", gk, rk);
		Assert.assertEquals("The re-read value is wrong", gv, rv);
	}

	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
Example 11
Source File: CombiningUnilateralSortMergerITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSortAndValidate() throws Exception {
	final Hashtable<Integer, Integer> countTable = new Hashtable<>(KEY_MAX);
	for (int i = 1; i <= KEY_MAX; i++) {
		countTable.put(i, 0);
	}

	// comparator
	final TypeComparator<Integer> keyComparator = new IntComparator(true);

	// reader
	TestData.MockTuple2Reader<Tuple2<Integer, String>> reader = TestData.getIntStringTupleReader();

	// merge iterator
	LOG.debug("initializing sortmerger");

	TestCountCombiner2 comb = new TestCountCombiner2();

	Sorter<Tuple2<Integer, String>> merger = new CombiningUnilateralSortMerger<>(comb, this.memoryManager, this.ioManager, reader,
			this.parentTask, this.serializerFactory1, this.comparator1, 0.25, 2, 0.7f, true /* use large record handler */, false);

	// emit data
	LOG.debug("emitting data");
	TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);
	Tuple2<Integer, String> rec = new Tuple2<>();

	for (int i = 0; i < NUM_PAIRS; i++) {
		Assert.assertTrue((rec = generator.next(rec)) != null);
		final Integer key = rec.f0;
		rec.setField("1", 1);
		reader.emit(rec);

		countTable.put(key, countTable.get(key) + 1);
	}
	reader.close();

	// check order
	MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator();

	LOG.debug("checking results");

	Tuple2<Integer, String> rec1 = new Tuple2<>();
	Tuple2<Integer, String> rec2 = new Tuple2<>();

	Assert.assertTrue((rec1 = iterator.next(rec1)) != null);
	countTable.put(rec1.f0, countTable.get(rec1.f0) - (Integer.parseInt(rec1.f1)));

	while ((rec2 = iterator.next(rec2)) != null) {
		int k1 = rec1.f0;
		int k2 = rec2.f0;

		Assert.assertTrue(keyComparator.compare(k1, k2) <= 0);
		countTable.put(k2, countTable.get(k2) - (Integer.parseInt(rec2.f1)));

		rec1 = rec2;
	}

	for (Integer cnt : countTable.values()) {
		Assert.assertTrue(cnt == 0);
	}

	merger.close();

	// if the combiner was opened, it must have been closed
	Assert.assertTrue(comb.opened == comb.closed);
}
Example 12
Source File: ExternalSortITCase.java From flink with Apache License 2.0
@Test
public void testSpillingSortWithIntermediateMerge() {
	try {
		// amount of pairs
		final int PAIRS = 10000000;

		// comparator
		final TypeComparator<Integer> keyComparator = new IntComparator(true);

		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);
		final MutableObjectIterator<Tuple2<Integer, String>> source = new TestData.TupleGeneratorIterator(generator, PAIRS);

		// merge iterator
		LOG.debug("Initializing sortmerger...");

		Sorter<Tuple2<Integer, String>> merger = new UnilateralSortMerger<>(this.memoryManager, this.ioManager, source, this.parentTask,
				this.pactRecordSerializer, this.pactRecordComparator, (double) 64 / 78, 16, 0.7f, true /* use large record handler */, false);

		// emit data
		LOG.debug("Emitting data...");

		// check order
		MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator();

		LOG.debug("Checking results...");
		int pairsRead = 1;
		int nextStep = PAIRS / 20;

		Tuple2<Integer, String> rec1 = new Tuple2<>();
		Tuple2<Integer, String> rec2 = new Tuple2<>();

		Assert.assertTrue((rec1 = iterator.next(rec1)) != null);

		while ((rec2 = iterator.next(rec2)) != null) {
			pairsRead++;

			Assert.assertTrue(keyComparator.compare(rec1.f0, rec2.f0) <= 0);

			Tuple2<Integer, String> tmp = rec1;
			rec1 = rec2;
			rec2 = tmp;

			// log
			if (pairsRead == nextStep) {
				nextStep += PAIRS / 20;
			}
		}

		Assert.assertEquals("Not all pairs were read back in.", PAIRS, pairsRead);

		merger.close();
		testSuccess = true;
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
Example 13
Source File: CombiningUnilateralSortMergerITCase.java From flink with Apache License 2.0
@Test
public void testSortAndValidate() throws Exception {
	final Hashtable<Integer, Integer> countTable = new Hashtable<>(KEY_MAX);
	for (int i = 1; i <= KEY_MAX; i++) {
		countTable.put(i, 0);
	}

	// comparator
	final TypeComparator<Integer> keyComparator = new IntComparator(true);

	// reader
	TestData.MockTuple2Reader<Tuple2<Integer, String>> reader = TestData.getIntStringTupleReader();

	// merge iterator
	LOG.debug("initializing sortmerger");

	TestCountCombiner2 comb = new TestCountCombiner2();

	Sorter<Tuple2<Integer, String>> merger = new CombiningUnilateralSortMerger<>(comb, this.memoryManager, this.ioManager, reader,
			this.parentTask, this.serializerFactory1, this.comparator1, 0.25, 2, 0.7f, true /* use large record handler */, false);

	// emit data
	LOG.debug("emitting data");
	TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);
	Tuple2<Integer, String> rec = new Tuple2<>();

	for (int i = 0; i < NUM_PAIRS; i++) {
		Assert.assertTrue((rec = generator.next(rec)) != null);
		final Integer key = rec.f0;
		rec.setField("1", 1);
		reader.emit(rec);

		countTable.put(key, countTable.get(key) + 1);
	}
	reader.close();

	// check order
	MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator();

	LOG.debug("checking results");

	Tuple2<Integer, String> rec1 = new Tuple2<>();
	Tuple2<Integer, String> rec2 = new Tuple2<>();

	Assert.assertTrue((rec1 = iterator.next(rec1)) != null);
	countTable.put(rec1.f0, countTable.get(rec1.f0) - (Integer.parseInt(rec1.f1)));

	while ((rec2 = iterator.next(rec2)) != null) {
		int k1 = rec1.f0;
		int k2 = rec2.f0;

		Assert.assertTrue(keyComparator.compare(k1, k2) <= 0);
		countTable.put(k2, countTable.get(k2) - (Integer.parseInt(rec2.f1)));

		rec1 = rec2;
	}

	for (Integer cnt : countTable.values()) {
		Assert.assertTrue(cnt == 0);
	}

	merger.close();

	// if the combiner was opened, it must have been closed
	Assert.assertTrue(comb.opened == comb.closed);
}
Example 14
Source File: NormalizedKeySorterTest.java From flink with Apache License 2.0
@Test
public void testSortLongStringKeys() throws Exception {
	final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE;
	final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments);

	@SuppressWarnings("unchecked")
	TypeComparator<Tuple2<Integer, String>> accessors = TestData.getIntStringTupleTypeInfo().createComparator(new int[]{1}, new boolean[]{true}, 0, null);
	NormalizedKeySorter<Tuple2<Integer, String>> sorter = new NormalizedKeySorter<>(TestData.getIntStringTupleSerializer(), accessors, memory);

	TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);

	// write the records
	Tuple2<Integer, String> record = new Tuple2<>();
	do {
		generator.next(record);
	}
	while (sorter.write(record));

	QuickSort qs = new QuickSort();
	qs.sort(sorter);

	MutableObjectIterator<Tuple2<Integer, String>> iter = sorter.getIterator();
	Tuple2<Integer, String> readTarget = new Tuple2<>();

	iter.next(readTarget);
	String last = readTarget.f1;

	while ((readTarget = iter.next(readTarget)) != null) {
		String current = readTarget.f1;

		final int cmp = last.compareTo(current);
		if (cmp > 0) {
			Assert.fail("Next value is not larger or equal to previous value.");
		}

		last = current;
	}

	// release the memory occupied by the buffers
	sorter.dispose();
	this.memoryManager.release(memory);
}
Example 15
Source File: ExternalSortITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSpillingSortWithIntermediateMerge() {
	try {
		// amount of pairs
		final int PAIRS = 10000000;

		// comparator
		final TypeComparator<Integer> keyComparator = new IntComparator(true);

		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LENGTH, KeyMode.RANDOM, ValueMode.FIX_LENGTH);
		final MutableObjectIterator<Tuple2<Integer, String>> source = new TestData.TupleGeneratorIterator(generator, PAIRS);

		// merge iterator
		LOG.debug("Initializing sortmerger...");

		Sorter<Tuple2<Integer, String>> merger = new UnilateralSortMerger<>(this.memoryManager, this.ioManager, source, this.parentTask,
				this.pactRecordSerializer, this.pactRecordComparator, (double) 64 / 78, 16, 0.7f, true /* use large record handler */, false);

		// emit data
		LOG.debug("Emitting data...");

		// check order
		MutableObjectIterator<Tuple2<Integer, String>> iterator = merger.getIterator();

		LOG.debug("Checking results...");
		int pairsRead = 1;
		int nextStep = PAIRS / 20;

		Tuple2<Integer, String> rec1 = new Tuple2<>();
		Tuple2<Integer, String> rec2 = new Tuple2<>();

		Assert.assertTrue((rec1 = iterator.next(rec1)) != null);

		while ((rec2 = iterator.next(rec2)) != null) {
			pairsRead++;

			Assert.assertTrue(keyComparator.compare(rec1.f0, rec2.f0) <= 0);

			Tuple2<Integer, String> tmp = rec1;
			rec1 = rec2;
			rec2 = tmp;

			// log
			if (pairsRead == nextStep) {
				nextStep += PAIRS / 20;
			}
		}

		Assert.assertEquals("Not all pairs were read back in.", PAIRS, pairsRead);

		merger.close();
		testSuccess = true;
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}