org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory Java Examples
The following examples show how to use
org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory.
The examples are taken from open source projects; the source file and project are listed
above each example so you can refer to the original code for more context.
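Before the project examples, here is a minimal, self-contained sketch of the two access patterns they rely on: obtaining the shared factory via RecordSerializerFactory.get() (the form passed to TaskConfig and the sort-merger constructors below) and creating a Record serializer via getSerializer() to back a SerializationDelegate. The wrapper class name is illustrative and the import paths are assumed from the fully qualified class name above plus standard Flink packages, so treat this as a sketch rather than code from the listed projects.

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.runtime.plugable.SerializationDelegate;
import org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory;
import org.apache.flink.types.IntValue;
import org.apache.flink.types.Record;

// Illustrative wrapper class; not part of the projects indexed below.
public class RecordSerializerFactoryUsage {

    public static void main(String[] args) {
        // The factory instance is what the test harnesses hand to TaskConfig.setInputSerializer(...)
        // and to the sort-merger constructors in the examples below.
        RecordSerializerFactory factory = RecordSerializerFactory.get();

        // The serializer itself backs a SerializationDelegate, which the OutputEmitter tests
        // use when selecting an output channel for a Record.
        TypeSerializer<Record> serializer = factory.getSerializer();
        SerializationDelegate<Record> delegate = new SerializationDelegate<>(serializer);

        Record record = new Record(2);
        record.setField(0, new IntValue(42));
        delegate.setInstance(record);

        System.out.println("Delegate wraps a record with " + delegate.getInstance().getNumFields() + " fields");
    }
}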
Example #1
Source File: OutputEmitterTest.java, from flink (Apache License 2.0)
private boolean verifyWrongPartitionHashKey(int position, int fieldNum) {
    final TypeComparator<Record> comparator = new RecordComparatorFactory(
        new int[] {position}, new Class[] {IntValue.class}).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
        ShipStrategyType.PARTITION_HASH, comparator, 100);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    Record record = new Record(2);
    record.setField(fieldNum, new IntValue(1));
    delegate.setInstance(record);

    try {
        selector.selectChannel(delegate);
    } catch (NullKeyFieldException re) {
        Assert.assertEquals(position, re.getFieldNumber());
        return true;
    }
    return false;
}
Example #2
Source File: OutputEmitterTest.java, from Flink-CEPplus (Apache License 2.0)
private boolean verifyWrongPartitionHashKey(int position, int fieldNum) {
    final TypeComparator<Record> comparator = new RecordComparatorFactory(
        new int[] {position}, new Class[] {IntValue.class}).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
        ShipStrategyType.PARTITION_HASH, comparator, 100);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    Record record = new Record(2);
    record.setField(fieldNum, new IntValue(1));
    delegate.setInstance(record);

    try {
        selector.selectChannel(delegate);
    } catch (NullKeyFieldException re) {
        Assert.assertEquals(position, re.getFieldNumber());
        return true;
    }
    return false;
}
Example #3
Source File: TaskTestBase.java, from Flink-CEPplus (Apache License 2.0)
public IteratorWrappingTestSingleInputGate<Record> addInput(MutableObjectIterator<Record> input, int groupId, boolean read) {
    final IteratorWrappingTestSingleInputGate<Record> reader = this.mockEnv.addInput(input);
    TaskConfig conf = new TaskConfig(this.mockEnv.getTaskConfiguration());
    conf.addInputToGroup(groupId);
    conf.setInputSerializer(RecordSerializerFactory.get(), groupId);

    if (read) {
        reader.notifyNonEmpty();
    }

    return reader;
}
Example #4
Source File: DriverTestBase.java, from flink (Apache License 2.0)
public void addInputSorted(MutableObjectIterator<Record> input, RecordComparator comp) throws Exception {
    UnilateralSortMerger<Record> sorter = new UnilateralSortMerger<Record>(
            this.memManager, this.ioManager, input, this.owner,
            RecordSerializerFactory.get(), comp,
            this.perSortFractionMem, 32, 0.8f,
            true /*use large record handler*/, true);
    this.sorters.add(sorter);
    this.inputs.add(null);
}
Example #5
Source File: OutputEmitterTest.java, from flink (Apache License 2.0)
private int[] getSelectedChannelsHitCount(
        ShipStrategyType shipStrategyType,
        int numRecords,
        int numberOfChannels,
        Enum recordType) {

    final TypeComparator<Record> comparator = new RecordComparatorFactory(
        new int[] {0}, new Class[] {recordType == RecordType.INTEGER ? IntValue.class : StringValue.class}).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> selector =
        createChannelSelector(shipStrategyType, comparator, numberOfChannels);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    return getSelectedChannelsHitCount(selector, delegate, recordType, numRecords, numberOfChannels);
}
Example #6
Source File: OutputEmitterTest.java, from flink (Apache License 2.0)
@Test
public void testMultiKeys() {
    final int numberOfChannels = 100;
    final int numRecords = 5000;
    final TypeComparator<Record> multiComp = new RecordComparatorFactory(
        new int[] {0, 1, 3}, new Class[] {IntValue.class, StringValue.class, DoubleValue.class}).createComparator();

    final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
        ShipStrategyType.PARTITION_HASH, multiComp, numberOfChannels);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    int[] hits = new int[numberOfChannels];
    for (int i = 0; i < numRecords; i++) {
        Record record = new Record(4);
        record.setField(0, new IntValue(i));
        record.setField(1, new StringValue("AB" + i + "CD" + i));
        record.setField(3, new DoubleValue(i * 3.141d));
        delegate.setInstance(record);

        int channel = selector.selectChannel(delegate);
        hits[channel]++;
    }

    int totalHitCount = 0;
    for (int hit : hits) {
        assertTrue(hit > 0);
        totalHitCount += hit;
    }
    assertTrue(totalHitCount == numRecords);
}
Example #7
Source File: DriverTestBase.java, from Flink-CEPplus (Apache License 2.0)
public void addInputSorted(MutableObjectIterator<Record> input, RecordComparator comp) throws Exception {
    UnilateralSortMerger<Record> sorter = new UnilateralSortMerger<Record>(
            this.memManager, this.ioManager, input, this.owner,
            RecordSerializerFactory.get(), comp,
            this.perSortFractionMem, 32, 0.8f,
            true /*use large record handler*/, true);
    this.sorters.add(sorter);
    this.inputs.add(null);
}
Example #8
Source File: TaskTestBase.java, from flink (Apache License 2.0)
public IteratorWrappingTestSingleInputGate<Record> addInput(MutableObjectIterator<Record> input, int groupId, boolean read) {
    final IteratorWrappingTestSingleInputGate<Record> reader = this.mockEnv.addInput(input);
    TaskConfig conf = new TaskConfig(this.mockEnv.getTaskConfiguration());
    conf.addInputToGroup(groupId);
    conf.setInputSerializer(RecordSerializerFactory.get(), groupId);

    if (read) {
        reader.notifyNonEmpty();
    }

    return reader;
}
Example #9
Source File: OutputEmitterTest.java, from Flink-CEPplus (Apache License 2.0)
private int[] getSelectedChannelsHitCount(
        ShipStrategyType shipStrategyType,
        int numRecords,
        int numberOfChannels,
        Enum recordType) {

    final TypeComparator<Record> comparator = new RecordComparatorFactory(
        new int[] {0}, new Class[] {recordType == RecordType.INTEGER ? IntValue.class : StringValue.class}).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> selector =
        createChannelSelector(shipStrategyType, comparator, numberOfChannels);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    return getSelectedChannelsHitCount(selector, delegate, recordType, numRecords, numberOfChannels);
}
Example #10
Source File: OutputEmitterTest.java, from Flink-CEPplus (Apache License 2.0)
@Test
public void testMultiKeys() {
    final int numberOfChannels = 100;
    final int numRecords = 5000;
    final TypeComparator<Record> multiComp = new RecordComparatorFactory(
        new int[] {0, 1, 3}, new Class[] {IntValue.class, StringValue.class, DoubleValue.class}).createComparator();

    final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
        ShipStrategyType.PARTITION_HASH, multiComp, numberOfChannels);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    int[] hits = new int[numberOfChannels];
    for (int i = 0; i < numRecords; i++) {
        Record record = new Record(4);
        record.setField(0, new IntValue(i));
        record.setField(1, new StringValue("AB" + i + "CD" + i));
        record.setField(3, new DoubleValue(i * 3.141d));
        delegate.setInstance(record);

        int channel = selector.selectChannel(delegate);
        hits[channel]++;
    }

    int totalHitCount = 0;
    for (int hit : hits) {
        assertTrue(hit > 0);
        totalHitCount += hit;
    }
    assertTrue(totalHitCount == numRecords);
}
Example #11
Source File: ReduceTaskExternalITCase.java, from flink (Apache License 2.0)
@Test
public void testSingleLevelMergeCombiningReduceTask() {
    final int keyCnt = 8192;
    final int valCnt = 8;

    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    CombiningUnilateralSortMerger<Record> sorter = null;
    try {
        sorter = new CombiningUnilateralSortMerger<>(new MockCombiningReduceStub(),
            getMemoryManager(), getIOManager(), new UniformRecordGenerator(keyCnt, valCnt, false),
            getContainingTask(), RecordSerializerFactory.get(), this.comparator.duplicate(),
            this.perSortFractionMem, 2, 0.8f, true /* use large record handler */, true);
        addInput(sorter.getIterator());

        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    } finally {
        if (sorter != null) {
            sorter.close();
        }
    }

    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }

    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt,
        this.outList.size() == keyCnt);

    for (Record record : this.outList) {
        Assert.assertTrue("Incorrect result",
            record.getField(1, IntValue.class).getValue() == expSum - record.getField(0, IntValue.class).getValue());
    }

    this.outList.clear();
}
Example #12
Source File: ReduceTaskExternalITCase.java, from flink (Apache License 2.0)
@Test
public void testMultiLevelMergeCombiningReduceTask() {
    int keyCnt = 32768;
    int valCnt = 8;

    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    CombiningUnilateralSortMerger<Record> sorter = null;
    try {
        sorter = new CombiningUnilateralSortMerger<>(new MockCombiningReduceStub(),
            getMemoryManager(), getIOManager(), new UniformRecordGenerator(keyCnt, valCnt, false),
            getContainingTask(), RecordSerializerFactory.get(), this.comparator.duplicate(),
            this.perSortFractionMem, 2, 0.8f, true /* use large record handler */, false);
        addInput(sorter.getIterator());

        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    } finally {
        if (sorter != null) {
            sorter.close();
        }
    }

    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }

    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt,
        this.outList.size() == keyCnt);

    for (Record record : this.outList) {
        Assert.assertTrue("Incorrect result",
            record.getField(1, IntValue.class).getValue() == expSum - record.getField(0, IntValue.class).getValue());
    }

    this.outList.clear();
}
Example #13
Source File: TaskTestBase.java, from flink (Apache License 2.0)
public void addOutput(List<Record> output) {
    this.mockEnv.addOutput(output);

    TaskConfig conf = new TaskConfig(this.mockEnv.getTaskConfiguration());
    conf.addOutputShipStrategy(ShipStrategyType.FORWARD);
    conf.setOutputSerializer(RecordSerializerFactory.get());
}
Example #14
Source File: OutputEmitterTest.java, from flink (Apache License 2.0)
@Test
public void testForcedRebalance() {
    final int numberOfChannels = 100;
    int toTaskIndex = numberOfChannels * 6 / 7;
    int fromTaskIndex = toTaskIndex + numberOfChannels;
    int extraRecords = numberOfChannels / 3;
    int numRecords = 50000 + extraRecords;

    final SerializationDelegate<Record> delegate = new SerializationDelegate<>(
        new RecordSerializerFactory().getSerializer());
    final ChannelSelector<SerializationDelegate<Record>> selector = new OutputEmitter<>(
        ShipStrategyType.PARTITION_FORCED_REBALANCE, fromTaskIndex);
    selector.setup(numberOfChannels);

    // Test for IntValue
    int[] hits = getSelectedChannelsHitCount(selector, delegate, RecordType.INTEGER, numRecords, numberOfChannels);

    int totalHitCount = 0;
    for (int i = 0; i < hits.length; i++) {
        if (toTaskIndex <= i || i < toTaskIndex + extraRecords - numberOfChannels) {
            assertTrue(hits[i] == (numRecords / numberOfChannels) + 1);
        } else {
            assertTrue(hits[i] == numRecords / numberOfChannels);
        }
        totalHitCount += hits[i];
    }
    assertTrue(totalHitCount == numRecords);

    toTaskIndex = numberOfChannels / 5;
    fromTaskIndex = toTaskIndex + 2 * numberOfChannels;
    extraRecords = numberOfChannels * 2 / 9;
    numRecords = 10000 + extraRecords;

    // Test for StringValue
    final ChannelSelector<SerializationDelegate<Record>> selector2 = new OutputEmitter<>(
        ShipStrategyType.PARTITION_FORCED_REBALANCE, fromTaskIndex);
    selector2.setup(numberOfChannels);

    hits = getSelectedChannelsHitCount(selector2, delegate, RecordType.STRING, numRecords, numberOfChannels);

    totalHitCount = 0;
    for (int i = 0; i < hits.length; i++) {
        if (toTaskIndex <= i && i < toTaskIndex + extraRecords) {
            assertTrue(hits[i] == (numRecords / numberOfChannels) + 1);
        } else {
            assertTrue(hits[i] == numRecords / numberOfChannels);
        }
        totalHitCount += hits[i];
    }
    assertTrue(totalHitCount == numRecords);
}
Example #15
Source File: ReduceTaskTest.java, from flink (Apache License 2.0)
@Test
public void testCombiningReduceTask() {
    final int keyCnt = 100;
    final int valCnt = 20;

    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    CombiningUnilateralSortMerger<Record> sorter = null;
    try {
        sorter = new CombiningUnilateralSortMerger<>(new MockCombiningReduceStub(),
            getMemoryManager(), getIOManager(), new UniformRecordGenerator(keyCnt, valCnt, false),
            getContainingTask(), RecordSerializerFactory.get(), this.comparator.duplicate(),
            this.perSortFractionMem, 4, 0.8f, true /* use large record handler */, true);
        addInput(sorter.getIterator());

        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    } finally {
        if (sorter != null) {
            sorter.close();
        }
    }

    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }

    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt,
        this.outList.size() == keyCnt);

    for (Record record : this.outList) {
        Assert.assertTrue("Incorrect result",
            record.getField(1, IntValue.class).getValue() == expSum - record.getField(0, IntValue.class).getValue());
    }

    this.outList.clear();
}
Example #16
Source File: DriverTestBase.java, from flink (Apache License 2.0)
@Override
public <X> TypeSerializerFactory<X> getInputSerializer(int index) {
    @SuppressWarnings("unchecked")
    TypeSerializerFactory<X> factory = (TypeSerializerFactory<X>) RecordSerializerFactory.get();
    return factory;
}
Example #17
Source File: ReduceTaskExternalITCase.java, from Flink-CEPplus (Apache License 2.0)
@Test
public void testSingleLevelMergeCombiningReduceTask() {
    final int keyCnt = 8192;
    final int valCnt = 8;

    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    CombiningUnilateralSortMerger<Record> sorter = null;
    try {
        sorter = new CombiningUnilateralSortMerger<>(new MockCombiningReduceStub(),
            getMemoryManager(), getIOManager(), new UniformRecordGenerator(keyCnt, valCnt, false),
            getContainingTask(), RecordSerializerFactory.get(), this.comparator.duplicate(),
            this.perSortFractionMem, 2, 0.8f, true /* use large record handler */, true);
        addInput(sorter.getIterator());

        GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();
        testDriver(testTask, MockCombiningReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    } finally {
        if (sorter != null) {
            sorter.close();
        }
    }

    int expSum = 0;
    for (int i = 1; i < valCnt; i++) {
        expSum += i;
    }

    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt,
        this.outList.size() == keyCnt);

    for (Record record : this.outList) {
        Assert.assertTrue("Incorrect result",
            record.getField(1, IntValue.class).getValue() == expSum - record.getField(0, IntValue.class).getValue());
    }

    this.outList.clear();
}
Example #18
Source File: TaskTestBase.java, from Flink-CEPplus (Apache License 2.0)
public void addOutput(List<Record> output) {
    this.mockEnv.addOutput(output);

    TaskConfig conf = new TaskConfig(this.mockEnv.getTaskConfiguration());
    conf.addOutputShipStrategy(ShipStrategyType.FORWARD);
    conf.setOutputSerializer(RecordSerializerFactory.get());
}
Example #19
Source File: DriverTestBase.java, from Flink-CEPplus (Apache License 2.0)
@Override
public <X> TypeSerializerFactory<X> getInputSerializer(int index) {
    @SuppressWarnings("unchecked")
    TypeSerializerFactory<X> factory = (TypeSerializerFactory<X>) RecordSerializerFactory.get();
    return factory;
}