Java Code Examples for org.apache.flink.types.Record#setField()
The following examples show how to use
org.apache.flink.types.Record#setField().
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: ReduceTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
	// Drain the group, remembering the last record and the group size.
	Record last = null;
	int count = 0;
	for (Record rec : records) {
		last = rec;
		count++;
	}
	// Deliberate test hook: blow up once enough groups have been processed.
	if (++this.cnt >= 10) {
		throw new ExpectedTestException();
	}
	// Emit (group size - key value) in field 1 of the last record.
	// NOTE(review): assumes every group is non-empty (reduce contract) — 'last' would be null otherwise.
	last.getField(0, this.key);
	this.value.setValue(count - this.key.getValue());
	last.setField(1, this.value);
	out.collect(last);
}
Example 2
Source File: DataSourceTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	// Simulate a slow source. If interrupted, restore the thread's interrupt
	// status (it was silently swallowed before) so the owner can observe the
	// cancellation, and signal "no record" to the caller.
	try {
		Thread.sleep(100);
	} catch (InterruptedException e) {
		Thread.currentThread().interrupt();
		return null;
	}
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	// Lines have the form "<key>_<value>"; malformed lines are skipped by
	// letting the parse/substring failure fall through to 'return null'.
	try {
		this.key.setValue(Integer.parseInt(line.substring(0, line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_") + 1)));
	} catch (RuntimeException re) {
		return null;
	}
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
Example 3
Source File: DataSourceTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	// Simulate a slow source. If interrupted, restore the thread's interrupt
	// status (it was silently swallowed before) so the owner can observe the
	// cancellation, and signal "no record" to the caller.
	try {
		Thread.sleep(100);
	} catch (InterruptedException e) {
		Thread.currentThread().interrupt();
		return null;
	}
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	// Lines have the form "<key>_<value>"; malformed lines are skipped by
	// letting the parse/substring failure fall through to 'return null'.
	try {
		this.key.setValue(Integer.parseInt(line.substring(0, line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_") + 1)));
	} catch (RuntimeException re) {
		return null;
	}
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
Example 4
Source File: CombineTaskExternalITCase.java From flink with Apache License 2.0 | 6 votes |
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
	// Sum field 1 across the group while keeping a handle on the last record.
	int total = 0;
	Record last = null;
	for (Record rec : records) {
		last = rec;
		last.getField(1, this.value);
		total += this.value.getValue();
	}
	// Emit (sum - key value) in field 1 of the last record of the group.
	last.getField(0, this.key);
	this.value.setValue(total - this.key.getValue());
	last.setField(1, this.value);
	out.collect(last);
}
Example 5
Source File: DataSourceTaskTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	// Fail deliberately after 10 records to exercise the task's error path.
	// Fixed typo in the message: "Excpected" -> "Expected".
	if (this.cnt == 10) {
		throw new RuntimeException("Expected Test Exception.");
	}
	this.cnt++;
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	// Lines have the form "<key>_<value>"; malformed lines are skipped.
	try {
		this.key.setValue(Integer.parseInt(line.substring(0, line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_") + 1)));
	} catch (RuntimeException re) {
		return null;
	}
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
Example 6
Source File: ReduceTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Override
public void combine(Iterable<Record> records, Collector<Record> out) {
	// Pre-aggregation: add up field 1 over the group and write the partial
	// sum back into the last record before forwarding it.
	int partialSum = 0;
	Record last = null;
	for (Record rec : records) {
		last = rec;
		last.getField(1, this.combineValue);
		partialSum += this.combineValue.getValue();
	}
	this.combineValue.setValue(partialSum);
	last.setField(1, this.combineValue);
	out.collect(last);
}
Example 7
Source File: DataSourceTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	// Fail deliberately after 10 records to exercise the task's error path.
	// Fixed typo in the message: "Excpected" -> "Expected".
	if (this.cnt == 10) {
		throw new RuntimeException("Expected Test Exception.");
	}
	this.cnt++;
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	// Lines have the form "<key>_<value>"; malformed lines are skipped.
	try {
		this.key.setValue(Integer.parseInt(line.substring(0, line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_") + 1)));
	} catch (RuntimeException re) {
		return null;
	}
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
Example 8
Source File: ReduceTaskExternalITCase.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public void combine(Iterable<Record> records, Collector<Record> out) {
	// Combiner step: accumulate field 1 over the whole group, then stamp the
	// partial sum onto the last record seen and emit it.
	int partialSum = 0;
	Record last = null;
	for (Record rec : records) {
		last = rec;
		last.getField(1, this.combineValue);
		partialSum += this.combineValue.getValue();
	}
	this.combineValue.setValue(partialSum);
	last.setField(1, this.combineValue);
	out.collect(last);
}
Example 9
Source File: DataSourceTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	// Decode "<key>_<value>" from the raw bytes; malformed lines (missing
	// separator or non-numeric parts) are skipped via the RuntimeException catch.
	final String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	try {
		final int sep = line.indexOf("_");
		this.key.setValue(Integer.parseInt(line.substring(0, sep)));
		this.value.setValue(Integer.parseInt(line.substring(sep + 1, line.length())));
	} catch (RuntimeException re) {
		return null;
	}
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
Example 10
Source File: ReduceTaskTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public void combine(Iterable<Record> records, Collector<Record> out) {
	// Fold field 1 of every record into a running total, then write the
	// total back into the last record of the group and forward it.
	int runningTotal = 0;
	Record last = null;
	for (Record rec : records) {
		last = rec;
		last.getField(1, this.combineValue);
		runningTotal += this.combineValue.getValue();
	}
	this.combineValue.setValue(runningTotal);
	last.setField(1, this.combineValue);
	out.collect(last);
}
Example 11
Source File: ReduceTaskExternalITCase.java From flink with Apache License 2.0 | 6 votes |
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
	// Accumulate field 1 across the group, tracking the most recent record.
	int accumulated = 0;
	Record last = null;
	for (Record rec : records) {
		last = rec;
		last.getField(1, this.value);
		accumulated += this.value.getValue();
	}
	// Result is (sum - key value), written into field 1 of the last record.
	last.getField(0, this.key);
	this.value.setValue(accumulated - this.key.getValue());
	last.setField(1, this.value);
	out.collect(last);
}
Example 12
Source File: OutputEmitterTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
private boolean verifyWrongPartitionHashKey(int position, int fieldNum) {
	// Build a hash-partitioning channel selector keyed on an IntValue at
	// 'position', then feed it a record whose only field is set at 'fieldNum'.
	final TypeComparator<Record> comparator = new RecordComparatorFactory(
		new int[] {position}, new Class[] {IntValue.class}).createComparator();
	final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
		ShipStrategyType.PARTITION_HASH, comparator, 100);
	final SerializationDelegate<Record> delegate =
		new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

	final Record record = new Record(2);
	record.setField(fieldNum, new IntValue(1));
	delegate.setInstance(record);

	// Selecting a channel must fail because the configured key field is unset;
	// returns true only when the expected NullKeyFieldException was raised.
	try {
		selector.selectChannel(delegate);
	} catch (NullKeyFieldException e) {
		Assert.assertEquals(position, e.getFieldNumber());
		return true;
	}
	return false;
}
Example 13
Source File: ReduceTaskTest.java From flink with Apache License 2.0 | 5 votes |
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
	// Count the group's records while keeping a handle on the last one.
	Record last = null;
	int size = 0;
	for (Record rec : records) {
		last = rec;
		size++;
	}
	// Emit (group size - key value) in field 1 of the last record.
	last.getField(0, this.key);
	this.value.setValue(size - this.key.getValue());
	last.setField(1, this.value);
	out.collect(last);
}
Example 14
Source File: ChainedAllReduceDriverTest.java From flink with Apache License 2.0 | 5 votes |
@Override public Record reduce(Record value1, Record value2) throws Exception { IntValue v1 = value1.getField(0, IntValue.class); IntValue v2 = value2.getField(0, IntValue.class); // set value and force update of record; this updates and returns // value1 in order to test ChainedAllReduceDriver.collect() when // object reuse is enabled v1.setValue(v1.getValue() + v2.getValue()); value1.setField(0, v1); value1.updateBinaryRepresenation(); return value1; }
Example 15
Source File: OutputEmitterTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test
public void testMultiKeys() {
	final int numberOfChannels = 100;
	final int numRecords = 5000;

	// Hash-partition on a composite key: (int, string, double) at fields 0, 1, 3.
	final TypeComparator<Record> multiComp = new RecordComparatorFactory(
		new int[] {0, 1, 3},
		new Class[] {IntValue.class, StringValue.class, DoubleValue.class}).createComparator();
	final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
		ShipStrategyType.PARTITION_HASH, multiComp, numberOfChannels);
	final SerializationDelegate<Record> delegate =
		new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

	final int[] hits = new int[numberOfChannels];
	for (int i = 0; i < numRecords; i++) {
		final Record record = new Record(4);
		record.setField(0, new IntValue(i));
		record.setField(1, new StringValue("AB" + i + "CD" + i));
		record.setField(3, new DoubleValue(i * 3.141d));
		delegate.setInstance(record);
		hits[selector.selectChannel(delegate)]++;
	}

	// Every channel should receive at least one record, and no record is lost.
	int totalHitCount = 0;
	for (int hit : hits) {
		assertTrue(hit > 0);
		totalHitCount += hit;
	}
	assertTrue(totalHitCount == numRecords);
}
Example 16
Source File: OutputEmitterTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testMultiKeys() {
	final int numberOfChannels = 100;
	final int numRecords = 5000;

	// Composite hash key over fields 0 (int), 1 (string), and 3 (double).
	final TypeComparator<Record> multiComp = new RecordComparatorFactory(
		new int[] {0, 1, 3},
		new Class[] {IntValue.class, StringValue.class, DoubleValue.class}).createComparator();
	final ChannelSelector<SerializationDelegate<Record>> selector = createChannelSelector(
		ShipStrategyType.PARTITION_HASH, multiComp, numberOfChannels);
	final SerializationDelegate<Record> delegate =
		new SerializationDelegate<>(new RecordSerializerFactory().getSerializer());

	final int[] hits = new int[numberOfChannels];
	for (int i = 0; i < numRecords; i++) {
		final Record record = new Record(4);
		record.setField(0, new IntValue(i));
		record.setField(1, new StringValue("AB" + i + "CD" + i));
		record.setField(3, new DoubleValue(i * 3.141d));
		delegate.setInstance(record);
		hits[selector.selectChannel(delegate)]++;
	}

	// Distribution check: each channel got at least one record; totals match.
	int totalHitCount = 0;
	for (int hit : hits) {
		assertTrue(hit > 0);
		totalHitCount += hit;
	}
	assertTrue(totalHitCount == numRecords);
}
Example 17
Source File: HashTableITCase.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override
public Record next(Record reuse) {
	// Exhausted: signal end of input.
	if (this.numLeft <= 0) {
		return null;
	}
	this.numLeft--;
	// Reset the reused record and populate (key, value) in fields 0 and 1.
	reuse.clear();
	reuse.setField(0, this.key);
	reuse.setField(1, this.value);
	return reuse;
}
Example 18
Source File: ReduceTaskExternalITCase.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
	// Tally the group's records, remembering the last one seen.
	Record last = null;
	int groupSize = 0;
	for (Record rec : records) {
		last = rec;
		groupSize++;
	}
	// Write (count - key value) into field 1 of the last record and emit it.
	last.getField(0, this.key);
	this.value.setValue(groupSize - this.key.getValue());
	last.setField(1, this.value);
	out.collect(last);
}
Example 19
Source File: ReduceTaskExternalITCase.java From flink with Apache License 2.0 | 5 votes |
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
	// Walk the group once: count records and keep the final one.
	Record last = null;
	int groupSize = 0;
	for (Record rec : records) {
		last = rec;
		groupSize++;
	}
	// Emit the last record with field 1 set to (count - key value).
	last.getField(0, this.key);
	this.value.setValue(groupSize - this.key.getValue());
	last.setField(1, this.value);
	out.collect(last);
}
Example 20
Source File: InfiniteInputIterator.java From flink with Apache License 2.0 | 4 votes |
@Override
public Record next(Record reuse) {
	// Endless source: every call fills the reused record with the same
	// two values (val1 in field 0, val2 in field 1) and never returns null.
	reuse.setField(0, val1);
	reuse.setField(1, val2);
	return reuse;
}