org.apache.flink.types.Record Java Examples
The following examples show how to use org.apache.flink.types.Record.
They are taken from open-source projects; the source file, originating project, and license are noted above each example.
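Before diving into the examples, here is a minimal, self-contained sketch of the Record field API that most of them rely on: fields are set and read through mutable Value wrappers such as IntValue and StringValue, either by requesting a type or by filling a reusable target object. The class name and field values below are illustrative only and are not taken from any of the examples.

import org.apache.flink.types.IntValue;
import org.apache.flink.types.Record;
import org.apache.flink.types.StringValue;

public class RecordSketch {

    public static void main(String[] args) {
        // Create a record with two fields and populate them with Value wrappers.
        Record record = new Record(2);
        record.setField(0, new IntValue(42));
        record.setField(1, new StringValue("hello"));

        // Read a field back by asking for its type ...
        IntValue key = record.getField(0, IntValue.class);

        // ... or by filling a reusable target object, which avoids allocations
        // in the same way the reduce/combine examples below do.
        StringValue reuse = new StringValue();
        record.getField(1, reuse);

        System.out.println(key.getValue() + " / " + reuse.getValue());
    }
}

The reusable-target form of getField is the pattern you will see throughout the reduce, combine, and join stubs in the examples that follow.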
Example #1
Source File: CombineTaskExternalITCase.java (Flink-CEPplus, Apache License 2.0)
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
    Record element = null;
    int sum = 0;

    for (Record next : records) {
        element = next;
        element.getField(1, this.value);
        sum += this.value.getValue();
    }

    element.getField(0, this.key);
    this.value.setValue(sum - this.key.getValue());
    element.setField(1, this.value);
    out.collect(element);
}
Example #2
Source File: CombineTaskExternalITCase.java (Flink-CEPplus, Apache License 2.0)
@Override
public void combine(Iterable<Record> records, Collector<Record> out) {
    Record element = null;
    int sum = 0;

    for (Record next : records) {
        element = next;
        element.getField(1, this.combineValue);
        sum += this.combineValue.getValue();
    }

    if (++this.cnt >= 10) {
        throw new ExpectedTestException();
    }

    this.combineValue.setValue(sum);
    element.setField(1, this.combineValue);
    out.collect(element);
}
Example #3
Source File: NirvanaOutputList.java (flink, Apache License 2.0)
@Override
public Iterator<Record> iterator() {
    return new Iterator<Record>() {

        @Override
        public boolean hasNext() {
            return false;
        }

        @Override
        public Record next() {
            return null;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };
}
Example #4
Source File: CombineTaskExternalITCase.java (flink, Apache License 2.0)
@Override
public void combine(Iterable<Record> records, Collector<Record> out) {
    Record element = null;
    int sum = 0;

    for (Record next : records) {
        element = next;
        element.getField(1, this.combineValue);
        sum += this.combineValue.getValue();
    }

    if (++this.cnt >= 10) {
        throw new ExpectedTestException();
    }

    this.combineValue.setValue(sum);
    element.setField(1, this.combineValue);
    out.collect(element);
}
Example #5
Source File: RecordPairComparatorFactory.java (flink, Apache License 2.0)
@Override
public TypePairComparator<Record, Record> createComparator12(
        TypeComparator<Record> comparator1, TypeComparator<Record> comparator2) {

    if (!(comparator1 instanceof RecordComparator && comparator2 instanceof RecordComparator)) {
        throw new IllegalArgumentException("Cannot instantiate pair comparator from the given comparators.");
    }
    final RecordComparator prc1 = (RecordComparator) comparator1;
    final RecordComparator prc2 = (RecordComparator) comparator2;

    final int[] pos1 = prc1.getKeyPositions();
    final int[] pos2 = prc2.getKeyPositions();

    final Class<? extends Value>[] types1 = prc1.getKeyTypes();
    final Class<? extends Value>[] types2 = prc2.getKeyTypes();

    checkComparators(pos1, pos2, types1, types2);

    return new RecordPairComparator(pos1, pos2, types1);
}
Example #6
Source File: DataSourceTaskTest.java (Flink-CEPplus, Apache License 2.0)
public static void prepareInputFile(MutableObjectIterator<Record> inIt, File inputFile, boolean insertInvalidData)
        throws IOException {

    try (BufferedWriter bw = new BufferedWriter(new FileWriter(inputFile))) {
        if (insertInvalidData) {
            bw.write("####_I_AM_INVALID_########\n");
        }

        Record rec = new Record();
        while ((rec = inIt.next(rec)) != null) {
            IntValue key = rec.getField(0, IntValue.class);
            IntValue value = rec.getField(1, IntValue.class);

            bw.write(key.getValue() + "_" + value.getValue() + "\n");
        }

        if (insertInvalidData) {
            bw.write("####_I_AM_INVALID_########\n");
        }

        bw.flush();
    }
}
Example #7
Source File: DataSourceTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
    String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);

    try {
        this.key.setValue(Integer.parseInt(line.substring(0, line.indexOf("_"))));
        this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_") + 1, line.length())));
    }
    catch (RuntimeException re) {
        return null;
    }

    target.setField(0, this.key);
    target.setField(1, this.value);
    return target;
}
Example #8
Source File: ReduceTaskTest.java (flink, Apache License 2.0)
@Test
public void testFailingReduceTask() {
    final int keyCnt = 100;
    final int valCnt = 20;

    addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();

    try {
        testDriver(testTask, MockFailingReduceStub.class);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException eetex) {
        // Good!
    } catch (Exception e) {
        LOG.info("Exception which was not the ExpectedTestException while running the test task.", e);
        Assert.fail("Test caused exception: " + e.getMessage());
    }

    this.outList.clear();
}
Example #9
Source File: ReduceTaskTest.java (flink, Apache License 2.0)
@Override
public void combine(Iterable<Record> records, Collector<Record> out) {
    Record element = null;
    int sum = 0;

    for (Record next : records) {
        element = next;
        element.getField(1, this.combineValue);
        sum += this.combineValue.getValue();
    }

    this.combineValue.setValue(sum);
    element.setField(1, this.combineValue);
    out.collect(element);
}
Example #10
Source File: ReduceTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Test
public void testFailingReduceTask() {
    final int keyCnt = 100;
    final int valCnt = 20;

    addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();

    try {
        testDriver(testTask, MockFailingReduceStub.class);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException eetex) {
        // Good!
    } catch (Exception e) {
        LOG.info("Exception which was not the ExpectedTestException while running the test task.", e);
        Assert.fail("Test caused exception: " + e.getMessage());
    }

    this.outList.clear();
}
Example #11
Source File: BinaryInputFormatTest.java (Flink-CEPplus, Apache License 2.0)
@Test
public void testGetStatisticsMultiplePaths() throws IOException {
    final int blockInfoSize = new BlockInfo().getInfoSize();
    final int blockSize = blockInfoSize + 8;
    final int numBlocks1 = 3;
    final int numBlocks2 = 5;

    final File tempFile = createBinaryInputFile("binary_input_format_test", blockSize, numBlocks1);
    final File tempFile2 = createBinaryInputFile("binary_input_format_test_2", blockSize, numBlocks2);

    final BinaryInputFormat<Record> inputFormat = new MyBinaryInputFormat();
    inputFormat.setFilePaths(tempFile.toURI().toString(), tempFile2.toURI().toString());
    inputFormat.setBlockSize(blockSize);

    BaseStatistics stats = inputFormat.getStatistics(null);
    Assert.assertEquals("The file size statistics is wrong",
        blockSize * (numBlocks1 + numBlocks2), stats.getTotalInputSize());
}
Example #12
Source File: CrossTaskExternalITCase.java (flink, Apache License 2.0)
@Test
public void testExternalBlockCrossTask() {
    int keyCnt1 = 2;
    int valCnt1 = 1;

    // 43690 fit into memory, 43691 do not!
    int keyCnt2 = 43700;
    int valCnt2 = 1;

    final int expCnt = keyCnt1 * valCnt1 * keyCnt2 * valCnt2;

    setOutput(this.output);

    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));

    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_BLOCKED_OUTER_FIRST);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);

    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<Record, Record, Record>();

    try {
        testDriver(testTask, MockCrossStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }

    Assert.assertEquals("Wrong result size.", expCnt, this.output.getNumberOfRecords());
}
Example #13
Source File: DataSinkTaskTest.java (flink, Apache License 2.0)
@Override
public void writeRecord(Record rec) throws IOException {
    if (++this.cnt >= 10) {
        throw new RuntimeException("Expected Test Exception");
    }
    super.writeRecord(rec);
}
Example #14
Source File: OutputEmitterTest.java (flink, Apache License 2.0)
@Test
public void testMultiKeys() {
    final int numberOfChannels = 100;
    final int numRecords = 5000;
    final TypeComparator<Record> multiComp = new RecordComparatorFactory(
        new int[] {0, 1, 3},
        new Class[] {IntValue.class, StringValue.class, DoubleValue.class}).createComparator();

    final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
        ShipStrategyType.PARTITION_HASH, multiComp, numberOfChannels);
    final SerializationDelegate<Record> delegate =
        new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    int[] hits = new int[numberOfChannels];
    for (int i = 0; i < numRecords; i++) {
        Record record = new Record(4);
        record.setField(0, new IntValue(i));
        record.setField(1, new StringValue("AB" + i + "CD" + i));
        record.setField(3, new DoubleValue(i * 3.141d));
        delegate.setInstance(record);

        int channel = selector.selectChannel(delegate);
        hits[channel]++;
    }

    int totalHitCount = 0;
    for (int hit : hits) {
        assertTrue(hit > 0);
        totalHitCount += hit;
    }
    assertTrue(totalHitCount == numRecords);
}
Example #15
Source File: DataSinkTaskTest.java (flink, Apache License 2.0)
@Test
@SuppressWarnings("unchecked")
public void testFailingSortingDataSinkTask() {

    int keyCnt = 100;
    int valCnt = 20;
    double memoryFraction = 1.0;

    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    super.addInput(new UniformRecordGenerator(keyCnt, valCnt, true), 0);

    DataSinkTask<Record> testTask = new DataSinkTask<>(this.mockEnv);
    Configuration stubParams = new Configuration();

    // set sorting
    super.getTaskConfig().setInputLocalStrategy(0, LocalStrategy.SORT);
    super.getTaskConfig().setInputComparator(
        new RecordComparatorFactory(new int[]{1}, (new Class[]{IntValue.class})), 0);
    super.getTaskConfig().setRelativeMemoryInput(0, memoryFraction);
    super.getTaskConfig().setFilehandlesInput(0, 8);
    super.getTaskConfig().setSpillingThresholdInput(0, 0.8f);

    File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());

    super.registerFileOutputTask(MockFailingOutputFormat.class, tempTestFile.toURI().toString(), stubParams);

    boolean stubFailed = false;

    try {
        testTask.invoke();
    } catch (Exception e) {
        stubFailed = true;
    }

    Assert.assertTrue("Function exception was not forwarded.", stubFailed);

    // assert that temp file was removed
    Assert.assertFalse("Temp output file has not been removed", tempTestFile.exists());
}
Example #16
Source File: ReduceTaskTest.java (flink, Apache License 2.0)
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
    for (@SuppressWarnings("unused") Record r : records) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
        }
    }
}
Example #17
Source File: CrossTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Test
public void testBlockEmptyInnerCrossTask() {
    int keyCnt1 = 10;
    int valCnt1 = 1;

    int keyCnt2 = 0;
    int valCnt2 = 0;

    final int expCnt = keyCnt1 * valCnt1 * keyCnt2 * valCnt2;

    setOutput(this.output);

    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));

    getTaskConfig().setDriverStrategy(DriverStrategy.NESTEDLOOP_BLOCKED_OUTER_FIRST);
    getTaskConfig().setRelativeMemoryDriver(cross_frac);

    final CrossDriver<Record, Record, Record> testTask = new CrossDriver<>();

    try {
        testDriver(testTask, MockCrossStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test failed due to an exception.");
    }

    Assert.assertEquals("Wrong result size.", expCnt, this.output.getNumberOfRecords());
}
Example #18
Source File: ReduceTaskTest.java (flink, Apache License 2.0)
@Test
public void testReduceTaskOnPreSortedInput() {
    final int keyCnt = 100;
    final int valCnt = 20;

    addInput(new UniformRecordGenerator(keyCnt, valCnt, true));
    addDriverComparator(this.comparator);
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

    GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>();

    try {
        testDriver(testTask, MockReduceStub.class);
    } catch (Exception e) {
        LOG.info("Exception while running the test task.", e);
        Assert.fail("Invoke method caused exception: " + e.getMessage());
    }

    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + keyCnt,
        this.outList.size() == keyCnt);

    for (Record record : this.outList) {
        Assert.assertTrue("Incorrect result",
            record.getField(1, IntValue.class).getValue() == valCnt - record.getField(0, IntValue.class).getValue());
    }

    this.outList.clear();
}
Example #19
Source File: ChainTaskTest.java (flink, Apache License 2.0)
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) throws Exception {
    if (++this.cnt >= 5) {
        throw new RuntimeException("Expected Test Exception");
    }

    for (Record r : records) {
        out.collect(r);
    }
}
Example #20
Source File: MutableObjectIteratorWrapper.java (flink, Apache License 2.0)
@Override
public Record next(Record reuse) throws IOException {
    if (this.source.hasNext()) {
        return this.source.next();
    } else {
        return null;
    }
}
Example #21
Source File: JoinTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Override
public void join(Record record1, Record record2, Collector<Record> out) throws Exception {
    try {
        Thread.sleep(100);
    } catch (InterruptedException e) {
    }
}
Example #22
Source File: JoinTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Override
public void join(Record record1, Record record2, Collector<Record> out) throws Exception {
    if (++this.cnt >= 10) {
        throw new ExpectedTestException();
    }

    out.collect(record1);
}
Example #23
Source File: JoinTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Test
public void testFailingHashSecondMatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 20;

    int keyCnt2 = 20;
    int valCnt2 = 20;

    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(new NirvanaOutputList());
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);

    JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();

    try {
        testDriver(testTask, MockFailingMatchStub.class);
        Assert.fail("Function exception was not forwarded.");
    } catch (ExpectedTestException etex) {
        // good!
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }
}
Example #24
Source File: CrossTaskTest.java (flink, Apache License 2.0)
@Override
public Record cross(Record record1, Record record2) {
    if (++this.cnt >= 10) {
        throw new ExpectedTestException();
    }
    return record1;
}
Example #25
Source File: JoinTaskTest.java (flink, Apache License 2.0)
@Test
public void testSortBoth1MatchTask() {
    final int keyCnt1 = 20;
    final int valCnt1 = 1;

    final int keyCnt2 = 10;
    final int valCnt2 = 2;

    setOutput(this.outList);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);

    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();

    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInputSorted(new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate());
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }

    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + expCnt,
        this.outList.size() == expCnt);

    this.outList.clear();
}
Example #26
Source File: JoinTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Test
public void testHash3MatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 1;

    int keyCnt2 = 20;
    int valCnt2 = 20;

    addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
    addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    setOutput(this.outList);
    getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
    getTaskConfig().setRelativeMemoryDriver(hash_frac);

    JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();

    try {
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Test caused an exception.");
    }

    final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertEquals("Wrong result set size.", expCnt, this.outList.size());

    this.outList.clear();
}
Example #27
Source File: OutputEmitterTest.java (flink, Apache License 2.0)
private int[] getSelectedChannelsHitCount(
        ShipStrategyType shipStrategyType,
        int numRecords,
        int numberOfChannels,
        Enum recordType) {

    final TypeComparator<Record> comparator = new RecordComparatorFactory(
        new int[] {0},
        new Class[] {recordType == RecordType.INTEGER ? IntValue.class : StringValue.class}).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> selector =
        createChannelSelector(shipStrategyType, comparator, numberOfChannels);
    final SerializationDelegate<Record> delegate =
        new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

    return getSelectedChannelsHitCount(selector, delegate, recordType, numRecords, numberOfChannels);
}
Example #28
Source File: DataSinkTaskTest.java (flink, Apache License 2.0)
@Test
@SuppressWarnings("unchecked")
public void testFailingSortingDataSinkTask() {

    int keyCnt = 100;
    int valCnt = 20;
    double memoryFraction = 1.0;

    super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
    super.addInput(new UniformRecordGenerator(keyCnt, valCnt, true), 0);

    DataSinkTask<Record> testTask = new DataSinkTask<>(this.mockEnv);
    Configuration stubParams = new Configuration();

    // set sorting
    super.getTaskConfig().setInputLocalStrategy(0, LocalStrategy.SORT);
    super.getTaskConfig().setInputComparator(
        new RecordComparatorFactory(new int[]{1}, (new Class[]{IntValue.class})), 0);
    super.getTaskConfig().setRelativeMemoryInput(0, memoryFraction);
    super.getTaskConfig().setFilehandlesInput(0, 8);
    super.getTaskConfig().setSpillingThresholdInput(0, 0.8f);

    File tempTestFile = new File(tempFolder.getRoot(), UUID.randomUUID().toString());

    super.registerFileOutputTask(MockFailingOutputFormat.class, tempTestFile.toURI().toString(), stubParams);

    boolean stubFailed = false;

    try {
        testTask.invoke();
    } catch (Exception e) {
        stubFailed = true;
    }

    Assert.assertTrue("Function exception was not forwarded.", stubFailed);

    // assert that temp file was removed
    Assert.assertFalse("Temp output file has not been removed", tempTestFile.exists());
}
Example #29
Source File: JoinTaskTest.java (Flink-CEPplus, Apache License 2.0)
@Test
public void testSortFirstMatchTask() {
    int keyCnt1 = 20;
    int valCnt1 = 20;

    int keyCnt2 = 20;
    int valCnt2 = 20;

    setOutput(this.outList);
    addDriverComparator(this.comparator1);
    addDriverComparator(this.comparator2);
    getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
    getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE);
    getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
    setNumFileHandlesForSort(4);

    final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>();

    try {
        addInputSorted(new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate());
        addInput(new UniformRecordGenerator(keyCnt2, valCnt2, true));
        testDriver(testTask, MockMatchStub.class);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("The test caused an exception.");
    }

    int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
    Assert.assertTrue("Resultset size was " + this.outList.size() + ". Expected was " + expCnt,
        this.outList.size() == expCnt);

    this.outList.clear();
}
Example #30
Source File: SerializedFormatTest.java (Flink-CEPplus, Apache License 2.0)
@Override
protected BinaryOutputFormat<Record> createOutputFormat(String path, Configuration configuration)
        throws IOException {

    final SerializedOutputFormat<Record> outputFormat = new SerializedOutputFormat<Record>();
    outputFormat.setOutputFilePath(new Path(path));
    outputFormat.setWriteMode(FileSystem.WriteMode.OVERWRITE);

    configuration = configuration == null ? new Configuration() : configuration;
    outputFormat.configure(configuration);
    outputFormat.open(0, 1);
    return outputFormat;
}