org.apache.flink.runtime.operators.testutils.types.IntList Java Examples
The following examples show how to use
org.apache.flink.runtime.operators.testutils.types.IntList.
You can go to the original project or source file by following the link above each example.
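Before the individual examples, the snippet below is a minimal usage sketch of the IntList test type. It is illustrative only: it uses nothing beyond the constructors and accessors that already appear in the examples on this page (IntList(), IntList(int, int[]), getKey(), getValue()), and it assumes the flink-runtime test artifact, which contains the testutils types, is on the classpath.

// Minimal usage sketch (illustrative only). It relies only on the constructors and
// accessors used by the examples below; running it requires the flink-runtime
// test artifact on the classpath.
import java.util.Arrays;
import java.util.Random;

import org.apache.flink.runtime.operators.testutils.types.IntList;

public class IntListUsageSketch {

    public static void main(String[] args) {
        Random rnd = new Random(42);

        // Build a record the same way getRandomizedIntLists(...) does in the examples:
        // an int key plus a variable-length int[] payload. The bound 16 is an arbitrary
        // stand-in for the tests' MAX_LIST_SIZE constant.
        int[] payload = new int[rnd.nextInt(16) + 1];
        for (int j = 0; j < payload.length; j++) {
            payload[j] = -rnd.nextInt(Integer.MAX_VALUE);
        }
        IntList record = new IntList(7, payload);

        // The no-argument constructor creates the reusable "target" instance that the
        // tests hand to prober.getMatchFor(...) and iter.next(...); it is declared here
        // only to show that constructor.
        IntList reuseTarget = new IntList();

        System.out.println("key   = " + record.getKey());
        System.out.println("value = " + Arrays.toString(record.getValue()));
    }
}

The examples below follow a common pattern: open a hash table backed by a fixed number of memory pages, insert randomized IntList records, probe or iterate to verify them, and finally check that closing the table returns every memory page.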
Example #1
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
@Test
public void testEntryIterator() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    int result = 0;
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
        result += lists[i].getKey();
    }

    MutableObjectIterator<IntList> iter = table.getEntryIterator();
    IntList target = new IntList();
    int sum = 0;
    while ((target = iter.next(target)) != null) {
        sum += target.getKey();
    }
    table.close();

    assertTrue(sum == result);
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #2
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
@Test
public void testMultipleProbers() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
    final IntPair[] pairs = getRandomizedIntPairs(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    AbstractHashTableProber<IntList, IntList> listProber = table.getProber(comparatorV, pairComparatorV);
    AbstractHashTableProber<IntPair, IntList> pairProber = table.getProber(intPairComparator, pairComparatorPL);

    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(pairProber.getMatchFor(pairs[i], target));
        assertNotNull(listProber.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #3
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
protected static IntList[] getRandomizedIntLists(int num, Random rnd) {
    IntList[] lists = new IntList[num];
    for (int i = 0; i < num; i++) {
        int[] value = new int[rnd.nextInt(MAX_LIST_SIZE) + 1];
        //int[] value = new int[MAX_LIST_SIZE-1];
        for (int j = 0; j < value.length; j++) {
            value[j] = -rnd.nextInt(Integer.MAX_VALUE);
        }
        lists[i] = new IntList(i, value);
    }
    return lists;
}
Example #4
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
@Test
public void testProberUpdate() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
        prober.updateMatch(overwriteLists[i]);
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
        assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #5
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
@Test
public void testVariableLengthBuildAndRetrieveMajorityUpdated() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

    // test replacing
    for (int i = 0; i < NUM_LISTS; i++) {
        if (i % 100 != 0) {
            table.insertOrReplaceRecord(overwriteLists[i]);
            lists[i] = overwriteLists[i];
        }
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #6
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
protected static IntList[] getRandomizedIntLists(int num, Random rnd) {
    IntList[] lists = new IntList[num];
    for (int i = 0; i < num; i++) {
        int[] value = new int[rnd.nextInt(MAX_LIST_SIZE) + 1];
        //int[] value = new int[MAX_LIST_SIZE-1];
        for (int j = 0; j < value.length; j++) {
            value[j] = -rnd.nextInt(Integer.MAX_VALUE);
        }
        lists[i] = new IntList(i, value);
    }
    return lists;
}
Example #7
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testProberUpdate() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
        prober.updateMatch(overwriteLists[i]);
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
        assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #8
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testVariableLengthBuildAndRetrieveMajorityUpdated() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

    // test replacing
    for (int i = 0; i < NUM_LISTS; i++) {
        if (i % 100 != 0) {
            table.insertOrReplaceRecord(overwriteLists[i]);
            lists[i] = overwriteLists[i];
        }
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #9
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testEntryIterator() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    int result = 0;
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
        result += lists[i].getKey();
    }

    MutableObjectIterator<IntList> iter = table.getEntryIterator();
    IntList target = new IntList();
    int sum = 0;
    while ((target = iter.next(target)) != null) {
        sum += target.getKey();
    }
    table.close();

    assertTrue(sum == result);
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #10
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testMultipleProbers() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
    final IntPair[] pairs = getRandomizedIntPairs(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    AbstractHashTableProber<IntList, IntList> listProber = table.getProber(comparatorV, pairComparatorV);
    AbstractHashTableProber<IntPair, IntList> pairProber = table.getProber(intPairComparator, pairComparatorPL);

    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(pairProber.getMatchFor(pairs[i], target));
        assertNotNull(listProber.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #11
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
@Test
public void testRepeatedBuildAndRetrieve() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        try {
            table.insert(lists[i]);
        } catch (Exception e) {
            throw e;
        }
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    IntList[] overwriteLists;
    for (int k = 0; k < NUM_REWRITES; k++) {
        overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);
        // test replacing
        for (int i = 0; i < NUM_LISTS; i++) {
            table.insertOrReplaceRecord(overwriteLists[i]);
        }
        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
            assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
        }
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #12
Source File: CompactingHashTableTest.java From flink with Apache License 2.0
@Test
public void testResizeWithCompaction() {
    // Only CompactingHashTable
    try {
        final int NUM_MEM_PAGES = (SIZE * NUM_LISTS / PAGE_SIZE);

        final Random rnd = new Random(RANDOM_SEED);
        final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

        List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
        CompactingHashTable<IntList> table = new CompactingHashTable<IntList>(serializerV, comparatorV, memory);
        table.open();

        for (int i = 0; i < NUM_LISTS; i++) {
            table.insert(lists[i]);
        }

        AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
        IntList target = new IntList();
        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull(prober.getMatchFor(lists[i], target));
            assertArrayEquals(lists[i].getValue(), target.getValue());
        }

        // make sure there is enough memory for resize
        memory.addAll(getMemory(ADDITIONAL_MEM));
        Boolean b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b);

        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull(prober.getMatchFor(lists[i], target));
            assertArrayEquals(lists[i].getValue(), target.getValue());
        }

        final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

        // test replacing
        for (int i = 0; i < NUM_LISTS; i++) {
            table.insertOrReplaceRecord(overwriteLists[i]);
        }

        Field list = Whitebox.getField(CompactingHashTable.class, "partitions");
        @SuppressWarnings("unchecked")
        ArrayList<InMemoryPartition<IntList>> partitions = (ArrayList<InMemoryPartition<IntList>>) list.get(table);
        int numPartitions = partitions.size();
        for (int i = 0; i < numPartitions; i++) {
            Whitebox.invokeMethod(table, "compactPartition", i);
        }

        // make sure there is enough memory for resize
        memory.addAll(getMemory(2 * ADDITIONAL_MEM));
        b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b);

        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
            assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
        }

        table.close();
        assertEquals("Memory lost", NUM_MEM_PAGES + 3 * ADDITIONAL_MEM, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example #13
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
@Test
public void testVariableLengthBuildAndRetrieveMinorityUpdated() throws Exception {
    final int NUM_LISTS = 20000;
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final int STEP_SIZE = 100;

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS / STEP_SIZE, rnd);

    // test replacing
    for (int i = 0; i < NUM_LISTS; i += STEP_SIZE) {
        overwriteLists[i / STEP_SIZE].setKey(overwriteLists[i / STEP_SIZE].getKey() * STEP_SIZE);
        table.insertOrReplaceRecord(overwriteLists[i / STEP_SIZE]);
        lists[i] = overwriteLists[i / STEP_SIZE];
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #14
Source File: MutableHashTableTestBase.java From flink with Apache License 2.0
@Test
public void testVariableLengthBuildAndRetrieve() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        try {
            table.insert(lists[i]);
        } catch (Exception e) {
            throw e;
        }
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

    // test replacing
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insertOrReplaceRecord(overwriteLists[i]);
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
        assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #15
Source File: CompactingHashTableTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testResizeWithCompaction() {
    // Only CompactingHashTable
    try {
        final int NUM_MEM_PAGES = (SIZE * NUM_LISTS / PAGE_SIZE);

        final Random rnd = new Random(RANDOM_SEED);
        final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

        List<MemorySegment> memory = getMemory(NUM_MEM_PAGES);
        CompactingHashTable<IntList> table = new CompactingHashTable<IntList>(serializerV, comparatorV, memory);
        table.open();

        for (int i = 0; i < NUM_LISTS; i++) {
            table.insert(lists[i]);
        }

        AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
        IntList target = new IntList();
        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull(prober.getMatchFor(lists[i], target));
            assertArrayEquals(lists[i].getValue(), target.getValue());
        }

        // make sure there is enough memory for resize
        memory.addAll(getMemory(ADDITIONAL_MEM));
        Boolean b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b);

        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull(prober.getMatchFor(lists[i], target));
            assertArrayEquals(lists[i].getValue(), target.getValue());
        }

        final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

        // test replacing
        for (int i = 0; i < NUM_LISTS; i++) {
            table.insertOrReplaceRecord(overwriteLists[i]);
        }

        Field list = Whitebox.getField(CompactingHashTable.class, "partitions");
        @SuppressWarnings("unchecked")
        ArrayList<InMemoryPartition<IntList>> partitions = (ArrayList<InMemoryPartition<IntList>>) list.get(table);
        int numPartitions = partitions.size();
        for (int i = 0; i < numPartitions; i++) {
            Whitebox.invokeMethod(table, "compactPartition", i);
        }

        // make sure there is enough memory for resize
        memory.addAll(getMemory(2 * ADDITIONAL_MEM));
        b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b);

        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
            assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
        }

        table.close();
        assertEquals("Memory lost", NUM_MEM_PAGES + 3 * ADDITIONAL_MEM, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example #16
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testRepeatedBuildAndRetrieve() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        try {
            table.insert(lists[i]);
        } catch (Exception e) {
            throw e;
        }
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    IntList[] overwriteLists;
    for (int k = 0; k < NUM_REWRITES; k++) {
        overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);
        // test replacing
        for (int i = 0; i < NUM_LISTS; i++) {
            table.insertOrReplaceRecord(overwriteLists[i]);
        }
        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
            assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
        }
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #17
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testVariableLengthBuildAndRetrieveMinorityUpdated() throws Exception {
    final int NUM_LISTS = 20000;
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final int STEP_SIZE = 100;

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insert(lists[i]);
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS / STEP_SIZE, rnd);

    // test replacing
    for (int i = 0; i < NUM_LISTS; i += STEP_SIZE) {
        overwriteLists[i / STEP_SIZE].setKey(overwriteLists[i / STEP_SIZE].getKey() * STEP_SIZE);
        table.insertOrReplaceRecord(overwriteLists[i / STEP_SIZE]);
        lists[i] = overwriteLists[i / STEP_SIZE];
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
Example #18
Source File: MutableHashTableTestBase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testVariableLengthBuildAndRetrieve() throws Exception {
    final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
    AbstractMutableHashTable<IntList> table = getHashTable(serializerV, comparatorV, getMemory(NUM_MEM_PAGES));

    final Random rnd = new Random(RANDOM_SEED);
    final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

    table.open();
    for (int i = 0; i < NUM_LISTS; i++) {
        try {
            table.insert(lists[i]);
        } catch (Exception e) {
            throw e;
        }
    }

    AbstractHashTableProber<IntList, IntList> prober = table.getProber(comparatorV, pairComparatorV);
    IntList target = new IntList();
    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull(prober.getMatchFor(lists[i], target));
        assertArrayEquals(lists[i].getValue(), target.getValue());
    }

    final IntList[] overwriteLists = getRandomizedIntLists(NUM_LISTS, rnd);

    // test replacing
    for (int i = 0; i < NUM_LISTS; i++) {
        table.insertOrReplaceRecord(overwriteLists[i]);
    }

    for (int i = 0; i < NUM_LISTS; i++) {
        assertNotNull("" + i, prober.getMatchFor(overwriteLists[i], target));
        assertArrayEquals(overwriteLists[i].getValue(), target.getValue());
    }
    table.close();

    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}