org.apache.flink.types.StringValue Java Examples
The following examples show how to use org.apache.flink.types.StringValue.
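StringValue is Flink's mutable, resettable counterpart to java.lang.String: a single instance can be reused across many records to avoid per-record allocation, which is the pattern most of the examples below rely on. A minimal sketch of that reuse (the class name and inputs here are illustrative, not taken from any example):

import org.apache.flink.types.StringValue;

public class StringValueReuseSketch {
    public static void main(String[] args) {
        // one mutable holder, reused for every record instead of allocating new Strings
        StringValue holder = new StringValue();

        for (String record : new String[] {"first", "second", "third"}) {
            holder.setValue(record);               // overwrite the contents in place
            System.out.println(holder.getValue()); // read the current contents back out
        }
    }
}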
Example #1
Source File: AggregateITCase.java From flink with Apache License 2.0
@Test
public void testFullAggregateOfMutableValueTypes() throws Exception {
    /*
     * Full Aggregate of mutable value types
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<IntValue, LongValue, StringValue>> ds = ValueCollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple2<IntValue, LongValue>> aggregateDs = ds
        .aggregate(Aggregations.SUM, 0)
        .and(Aggregations.MAX, 1)
        .project(0, 1);

    List<Tuple2<IntValue, LongValue>> result = aggregateDs.collect();

    String expected = "231,6\n";

    compareResultAsTuples(result, expected);
}
Example #2
Source File: GenericCsvInputFormatTest.java From flink with Apache License 2.0
@Test
public void testReadInvalidContentsLenient() {
    try {
        final String fileContent = "abc|222|def|444\nkkz|777|888|hhg";
        final FileInputSplit split = createTempFile(fileContent);

        final Configuration parameters = new Configuration();

        format.setFieldDelimiter("|");
        format.setFieldTypesGeneric(StringValue.class, IntValue.class, StringValue.class, IntValue.class);
        format.setLenient(true);

        format.configure(parameters);
        format.open(split);

        Value[] values = new Value[] { new StringValue(), new IntValue(), new StringValue(), new IntValue() };

        assertNotNull(format.nextRecord(values));
        assertNull(format.nextRecord(values));
    } catch (Exception ex) {
        fail("Test failed due to a " + ex.getClass().getSimpleName() + ": " + ex.getMessage());
    }
}
Example #3
Source File: StringValueArray.java From flink with Apache License 2.0
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("[");
    String separator = "";

    for (StringValue sv : this) {
        // prepend the separator so elements are comma-delimited without a trailing comma
        sb
            .append(separator)
            .append(sv.getValue());
        separator = ",";
    }

    sb.append("]");

    return sb.toString();
}
Example #4
Source File: LocalPropertiesFilteringTest.java From flink with Apache License 2.0
@Test
public void testSortingErased() {
    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sp, new String[]{"0;5"}, null, null, tupleInfo, tupleInfo);

    Ordering o = new Ordering();
    o.appendOrdering(2, IntValue.class, Order.ASCENDING);
    o.appendOrdering(0, StringValue.class, Order.DESCENDING);
    o.appendOrdering(5, LongValue.class, Order.DESCENDING);

    LocalProperties lProps = LocalProperties.forOrdering(o);

    LocalProperties filtered = lProps.filterBySemanticProperties(sp, 0);
    FieldList gFields = filtered.getGroupedFields();
    Ordering order = filtered.getOrdering();

    assertNull(gFields);
    assertNull(order);
    assertNull(filtered.getUniqueFields());
}
Example #5
Source File: TranslateTest.java From flink with Apache License 2.0
@Test
public void testTranslateGraphIds() throws Exception {
    Graph<StringValue, LongValue, LongValue> stringIdGraph = graph
        .translateGraphIds(new LongValueToStringValue());

    for (Vertex<StringValue, LongValue> vertex : stringIdGraph.getVertices().collect()) {
        assertEquals(StringValue.class, vertex.f0.getClass());
        assertEquals(LongValue.class, vertex.f1.getClass());
    }

    for (Edge<StringValue, LongValue> edge : stringIdGraph.getEdges().collect()) {
        assertEquals(StringValue.class, edge.f0.getClass());
        assertEquals(StringValue.class, edge.f1.getClass());
        assertEquals(LongValue.class, edge.f2.getClass());
    }

    TestBaseUtils.compareResultAsText(stringIdGraph.getVertices().collect(), expectedVertexResult);
    TestBaseUtils.compareResultAsText(stringIdGraph.getEdges().collect(), expectedEdgeResult);
}
Example #6
Source File: AccumulatorITCase.java From flink with Apache License 2.0
@Override
protected void postSubmit() throws Exception {
    compareResultsByLinesInMemory(EXPECTED, resultPath);

    // Test accumulator results
    System.out.println("Accumulator results:");
    JobExecutionResult res = this.result;
    System.out.println(AccumulatorHelper.getResultsFormatted(res.getAllAccumulatorResults()));

    Assert.assertEquals(Integer.valueOf(3), res.getAccumulatorResult("num-lines"));
    Assert.assertEquals(Integer.valueOf(3), res.getIntCounterResult("num-lines"));

    Assert.assertEquals(Double.valueOf(getParallelism()), res.getAccumulatorResult("open-close-counter"));

    // Test histogram (words per line distribution)
    Map<Integer, Integer> dist = new HashMap<>();
    dist.put(1, 1);
    dist.put(2, 1);
    dist.put(3, 1);
    Assert.assertEquals(dist, res.getAccumulatorResult("words-per-line"));

    // Test distinct words (custom accumulator)
    Set<StringValue> distinctWords = new HashSet<>();
    distinctWords.add(new StringValue("one"));
    distinctWords.add(new StringValue("two"));
    distinctWords.add(new StringValue("three"));
    Assert.assertEquals(distinctWords, res.getAccumulatorResult("distinct-words"));
}
Example #7
Source File: DriverTestData.java From flink with Apache License 2.0
public static List<Tuple2<StringValue, IntValue>> createReduceMutableData() {
    List<Tuple2<StringValue, IntValue>> data = new ArrayList<Tuple2<StringValue, IntValue>>();

    data.add(new Tuple2<StringValue, IntValue>(new StringValue("a"), new IntValue(1)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("b"), new IntValue(2)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("c"), new IntValue(3)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("d"), new IntValue(4)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("d"), new IntValue(5)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("e"), new IntValue(6)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("e"), new IntValue(7)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("e"), new IntValue(8)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("f"), new IntValue(9)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("f"), new IntValue(10)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("f"), new IntValue(11)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("f"), new IntValue(12)));

    return data;
}
Example #8
Source File: StringValueArray.java From flink with Apache License 2.0
@Override
public boolean addAll(ValueArray<StringValue> other) {
    StringValueArray source = (StringValueArray) other;

    int sourceSize = source.position;
    int newPosition = position + sourceSize;

    if (newPosition > data.length) {
        if (isBounded) {
            return false;
        } else {
            ensureCapacity(newPosition);
        }
    }

    System.arraycopy(source.data, 0, data, position, sourceSize);
    length += source.length;
    position = newPosition;

    return true;
}
Example #9
Source File: StringValueUtils.java From flink with Apache License 2.0
/**
 * Gets the next token from the string. If another token is available, the token is stored
 * in the given target StringValue object.
 *
 * @param target The StringValue object to store the next token in.
 * @return True, if there was another token, false if not.
 */
public boolean next(StringValue target) {
    final char[] data = this.toTokenize.getCharArray();
    final int limit = this.limit;
    int pos = this.pos;

    // skip the delimiter
    for (; pos < limit && Character.isWhitespace(data[pos]); pos++) {
    }

    if (pos >= limit) {
        this.pos = pos;
        return false;
    }

    final int start = pos;
    for (; pos < limit && !Character.isWhitespace(data[pos]); pos++) {
    }
    this.pos = pos;
    target.setValue(this.toTokenize, start, pos - start);
    return true;
}
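A tokenizer like this is typically driven in a loop that reuses a single target object, mirroring the setup in Example #24 below. A small usage sketch (the input sentence is made up for illustration):

StringValue text = new StringValue("to be or not to be");
StringValue token = new StringValue();

StringValueUtils.WhitespaceTokenizer tokenizer = new StringValueUtils.WhitespaceTokenizer();
tokenizer.setStringToTokenize(text);

// next(...) refills the reusable token object until the input is exhausted
while (tokenizer.next(token)) {
    System.out.println(token.getValue());
}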
Example #10
Source File: ValueArrayFactory.java From flink with Apache License 2.0
/**
 * Produce a {@code ValueArray} for the given {@code Value} type.
 *
 * @param cls {@code Value} class
 * @return {@code ValueArray} for given {@code Value} class
 */
@SuppressWarnings("unchecked")
public static <T> ValueArray<T> createValueArray(Class<? extends Value> cls) {
    if (ByteValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new ByteValueArray();
    } else if (CharValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new CharValueArray();
    } else if (DoubleValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new DoubleValueArray();
    } else if (FloatValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new FloatValueArray();
    } else if (IntValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new IntValueArray();
    } else if (LongValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new LongValueArray();
    } else if (NullValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new NullValueArray();
    } else if (ShortValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new ShortValueArray();
    } else if (StringValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new StringValueArray();
    } else {
        throw new IllegalArgumentException("Unable to create unbounded ValueArray for type " + cls);
    }
}
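For StringValue the factory dispatches to StringValueArray, so a caller might obtain and fill an array as in this sketch (the values are illustrative):

ValueArray<StringValue> array = ValueArrayFactory.createValueArray(StringValue.class);
array.add(new StringValue("alpha"));
array.add(new StringValue("beta"));

// ValueArray is Iterable; with the toString() from Example #3 this prints "[alpha,beta]"
System.out.println(array);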
Example #11
Source File: CsvReaderITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testValueTypes() throws Exception {
    final String inputData = "ABC,true,1,2,3,4,5.0,6.0\nBCD,false,1,2,3,4,5.0,6.0";
    final String dataPath = createInputData(inputData);
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> data =
        env.readCsvFile(dataPath).types(StringValue.class, BooleanValue.class, ByteValue.class, ShortValue.class, IntValue.class, LongValue.class, FloatValue.class, DoubleValue.class);

    List<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> result = data.collect();

    expected = inputData;
    compareResultAsTuples(result, expected);
}
Example #12
Source File: StringValueSummaryAggregatorTest.java From flink with Apache License 2.0
/**
 * Helper method for summarizing a list of values.
 *
 * <p>This method breaks the rule of "testing only one thing" by aggregating and combining
 * a bunch of different ways.
 */
@Override
protected StringColumnSummary summarize(String... values) {
    StringValue[] stringValues = new StringValue[values.length];
    for (int i = 0; i < values.length; i++) {
        if (values[i] != null) {
            stringValues[i] = new StringValue(values[i]);
        }
    }

    return new AggregateCombineHarness<StringValue, StringColumnSummary, ValueSummaryAggregator.StringValueSummaryAggregator>() {

        @Override
        protected void compareResults(StringColumnSummary result1, StringColumnSummary result2) {
            Assert.assertEquals(result1.getEmptyCount(), result2.getEmptyCount());
            Assert.assertEquals(result1.getMaxLength(), result2.getMaxLength());
            Assert.assertEquals(result1.getMinLength(), result2.getMinLength());
            if (result1.getMeanLength() == null) {
                Assert.assertEquals(result1.getMeanLength(), result2.getMeanLength());
            } else {
                Assert.assertEquals(result1.getMeanLength().doubleValue(), result2.getMeanLength().doubleValue(), 1e-5d);
            }
            Assert.assertEquals(result1.getNullCount(), result2.getNullCount());
            Assert.assertEquals(result1.getNonNullCount(), result2.getNonNullCount());
        }

    }.summarize(stringValues);
}
Example #13
Source File: ScalaCsvOutputFormat.java From flink with Apache License 2.0
@Override
public void writeRecord(T element) throws IOException {
    int numFields = element.productArity();

    for (int i = 0; i < numFields; i++) {
        Object v = element.productElement(i);
        if (v != null) {
            if (i != 0) {
                this.wrt.write(this.fieldDelimiter);
            }

            if (quoteStrings) {
                if (v instanceof String || v instanceof StringValue) {
                    this.wrt.write('"');
                    this.wrt.write(v.toString());
                    this.wrt.write('"');
                } else {
                    this.wrt.write(v.toString());
                }
            } else {
                this.wrt.write(v.toString());
            }
        } else {
            if (this.allowNullValues) {
                if (i != 0) {
                    this.wrt.write(this.fieldDelimiter);
                }
            } else {
                throw new RuntimeException("Cannot write tuple with <null> value at position: " + i);
            }
        }
    }

    // add the record delimiter
    this.wrt.write(this.recordDelimiter);
}
Example #14
Source File: NodeId.java From Flink-CEPplus with Apache License 2.0
@Override
public void serialize(NodeId record, DataOutputView target) throws IOException {
    if (record != null) {
        target.writeByte(1);
        eventIdSerializer.serialize(record.eventId, target);
        StringValue.writeString(record.pageName, target);
    } else {
        target.writeByte(0);
    }
}
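The static StringValue.writeString/readString pair used here is Flink's general-purpose string codec for DataOutputView/DataInputView streams. A round-trip sketch, assuming the DataOutputSerializer and DataInputDeserializer helpers from org.apache.flink.core.memory:

DataOutputSerializer out = new DataOutputSerializer(64); // grows as needed
StringValue.writeString("pageName", out);

DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
String roundTripped = StringValue.readString(in); // "pageName"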
Example #15
Source File: ValueCollectionDataSets.java From Flink-CEPplus with Apache License 2.0
public static DataSet<Tuple5<IntValue, LongValue, IntValue, StringValue, LongValue>> get5TupleDataSet(ExecutionEnvironment env) {
    List<Tuple5<IntValue, LongValue, IntValue, StringValue, LongValue>> data = new ArrayList<>();

    data.add(new Tuple5<>(new IntValue(1), new LongValue(1L), new IntValue(0), new StringValue("Hallo"), new LongValue(1L)));
    data.add(new Tuple5<>(new IntValue(2), new LongValue(2L), new IntValue(1), new StringValue("Hallo Welt"), new LongValue(2L)));
    data.add(new Tuple5<>(new IntValue(2), new LongValue(3L), new IntValue(2), new StringValue("Hallo Welt wie"), new LongValue(1L)));
    data.add(new Tuple5<>(new IntValue(3), new LongValue(4L), new IntValue(3), new StringValue("Hallo Welt wie gehts?"), new LongValue(2L)));
    data.add(new Tuple5<>(new IntValue(3), new LongValue(5L), new IntValue(4), new StringValue("ABC"), new LongValue(2L)));
    data.add(new Tuple5<>(new IntValue(3), new LongValue(6L), new IntValue(5), new StringValue("BCD"), new LongValue(3L)));
    data.add(new Tuple5<>(new IntValue(4), new LongValue(7L), new IntValue(6), new StringValue("CDE"), new LongValue(2L)));
    data.add(new Tuple5<>(new IntValue(4), new LongValue(8L), new IntValue(7), new StringValue("DEF"), new LongValue(1L)));
    data.add(new Tuple5<>(new IntValue(4), new LongValue(9L), new IntValue(8), new StringValue("EFG"), new LongValue(1L)));
    data.add(new Tuple5<>(new IntValue(4), new LongValue(10L), new IntValue(9), new StringValue("FGH"), new LongValue(2L)));
    data.add(new Tuple5<>(new IntValue(5), new LongValue(11L), new IntValue(10), new StringValue("GHI"), new LongValue(1L)));
    data.add(new Tuple5<>(new IntValue(5), new LongValue(12L), new IntValue(11), new StringValue("HIJ"), new LongValue(3L)));
    data.add(new Tuple5<>(new IntValue(5), new LongValue(13L), new IntValue(12), new StringValue("IJK"), new LongValue(3L)));
    data.add(new Tuple5<>(new IntValue(5), new LongValue(14L), new IntValue(13), new StringValue("JKL"), new LongValue(2L)));
    data.add(new Tuple5<>(new IntValue(5), new LongValue(15L), new IntValue(14), new StringValue("KLM"), new LongValue(2L)));

    Collections.shuffle(data);

    TupleTypeInfo<Tuple5<IntValue, LongValue, IntValue, StringValue, LongValue>> type = new TupleTypeInfo<>(
        ValueTypeInfo.INT_VALUE_TYPE_INFO,
        ValueTypeInfo.LONG_VALUE_TYPE_INFO,
        ValueTypeInfo.INT_VALUE_TYPE_INFO,
        ValueTypeInfo.STRING_VALUE_TYPE_INFO,
        ValueTypeInfo.LONG_VALUE_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example #16
Source File: ValueCollectionDataSets.java From Flink-CEPplus with Apache License 2.0
public static DataSet<Tuple2<Tuple2<IntValue, IntValue>, StringValue>> getSmallNestedTupleDataSet(ExecutionEnvironment env) {
    List<Tuple2<Tuple2<IntValue, IntValue>, StringValue>> data = new ArrayList<>();

    data.add(new Tuple2<>(new Tuple2<>(new IntValue(1), new IntValue(1)), new StringValue("one")));
    data.add(new Tuple2<>(new Tuple2<>(new IntValue(2), new IntValue(2)), new StringValue("two")));
    data.add(new Tuple2<>(new Tuple2<>(new IntValue(3), new IntValue(3)), new StringValue("three")));

    TupleTypeInfo<Tuple2<Tuple2<IntValue, IntValue>, StringValue>> type = new TupleTypeInfo<>(
        new TupleTypeInfo<Tuple2<IntValue, IntValue>>(ValueTypeInfo.INT_VALUE_TYPE_INFO, ValueTypeInfo.INT_VALUE_TYPE_INFO),
        ValueTypeInfo.STRING_VALUE_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example #17
Source File: NFAStateSerializer.java From flink with Apache License 2.0
private void copySingleComputationState(DataInputView source, DataOutputView target) throws IOException {
    StringValue.copyString(source, target);
    NodeId prevState = nodeIdSerializer.deserialize(source);
    nodeIdSerializer.serialize(prevState, target);
    DeweyNumber version = versionSerializer.deserialize(source);
    versionSerializer.serialize(version, target);
    long startTimestamp = source.readLong();
    target.writeLong(startTimestamp);

    copyStartEvent(source, target);
}
Example #18
Source File: GenericCsvInputFormatTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void readWithParseQuotedStrings() {
    try {
        final String fileContent = "\"ab\\\"c\"|\"def\"\n\"ghijk\"|\"abc\"";
        final FileInputSplit split = createTempFile(fileContent);

        final Configuration parameters = new Configuration();

        format.setFieldDelimiter("|");
        format.setFieldTypesGeneric(StringValue.class, StringValue.class);
        format.enableQuotedStringParsing('"');

        format.configure(parameters);
        format.open(split);

        Value[] values = new Value[] { new StringValue(), new StringValue() };

        values = format.nextRecord(values);
        assertNotNull(values);
        assertEquals("ab\\\"c", ((StringValue) values[0]).getValue());
        assertEquals("def", ((StringValue) values[1]).getValue());

        values = format.nextRecord(values);
        assertNotNull(values);
        assertEquals("ghijk", ((StringValue) values[0]).getValue());
        assertEquals("abc", ((StringValue) values[1]).getValue());
    } catch (Exception ex) {
        fail("Test failed due to a " + ex.getClass().getSimpleName() + ": " + ex.getMessage());
    }
}
Example #19
Source File: StringValueComparatorTest.java From flink with Apache License 2.0
@Override
protected StringValue[] getSortedTestData() {
    return new StringValue[] {
        new StringValue(""),
        new StringValue("Lorem Ipsum Dolor Omit Longer"),
        new StringValue("aaaa"),
        new StringValue("abcd"),
        new StringValue("abce"),
        new StringValue("abdd"),
        new StringValue("accd"),
        new StringValue("bbcd")
    };
}
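The test data above is pre-sorted because StringValue implements Comparable (via NormalizableKey), and its natural ordering must agree with the comparator under test. A quick sketch of that natural ordering in action:

StringValue[] values = {
    new StringValue("abce"), new StringValue(""), new StringValue("aaaa")
};

// natural ordering sorts to: "", "aaaa", "abce"
java.util.Arrays.sort(values);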
Example #20
Source File: NFAStateSerializer.java From Flink-CEPplus with Apache License 2.0
private void serializeSingleComputationState(
        ComputationState computationState,
        DataOutputView target) throws IOException {
    StringValue.writeString(computationState.getCurrentStateName(), target);
    nodeIdSerializer.serialize(computationState.getPreviousBufferEntry(), target);
    versionSerializer.serialize(computationState.getVersion(), target);
    target.writeLong(computationState.getStartTimestamp());

    serializeStartEvent(computationState.getStartEventID(), target);
}
Example #21
Source File: StringArraySerializer.java From Flink-CEPplus with Apache License 2.0
@Override
public String[] deserialize(DataInputView source) throws IOException {
    final int len = source.readInt();
    String[] array = new String[len];

    for (int i = 0; i < len; i++) {
        array[i] = StringValue.readString(source);
    }

    return array;
}
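The write path is symmetric: the array length is written first, then each element via StringValue.writeString. A sketch of what the corresponding serialize method plausibly looks like (reconstructed for illustration, not copied from the source file):

@Override
public void serialize(String[] record, DataOutputView target) throws IOException {
    // length prefix, followed by one length-prefixed string per element
    target.writeInt(record.length);
    for (String element : record) {
        StringValue.writeString(element, target);
    }
}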
Example #22
Source File: VarLengthStringParserTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testParseValidMixedStrings() {
    this.parser = new StringValueParser();
    this.parser.enableQuotedStringParsing((byte) '@');

    // check valid strings without whitespaces and trailing delimiter
    byte[] recBytes = "@abcde|gh@|@i@|jklmnopq|@rs@|tuv".getBytes(ConfigConstants.DEFAULT_CHARSET);
    StringValue s = new StringValue();

    int startPos = 0;
    startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
    assertTrue(startPos == 11);
    assertTrue(s.getValue().equals("abcde|gh"));

    startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
    assertTrue(startPos == 15);
    assertTrue(s.getValue().equals("i"));

    startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
    assertTrue(startPos == 24);
    assertTrue(s.getValue().equals("jklmnopq"));

    startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
    assertTrue(startPos == 29);
    assertTrue(s.getValue().equals("rs"));

    startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
    assertTrue(startPos == 32);
    assertTrue(s.getValue().equals("tuv"));
}
Example #23
Source File: DistinctTranslationTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void translateDistinctPosition() {
    try {
        final int parallelism = 8;
        ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment(parallelism);

        DataSet<Tuple3<Double, StringValue, LongValue>> initialData = getSourceDataSet(env);

        initialData.distinct(1, 2).output(new DiscardingOutputFormat<Tuple3<Double, StringValue, LongValue>>());

        Plan p = env.createProgramPlan();

        GenericDataSinkBase<?> sink = p.getDataSinks().iterator().next();

        // currently distinct is translated to a Reduce
        ReduceOperatorBase<?, ?> reducer = (ReduceOperatorBase<?, ?>) sink.getInput();

        // check types
        assertEquals(initialData.getType(), reducer.getOperatorInfo().getInputType());
        assertEquals(initialData.getType(), reducer.getOperatorInfo().getOutputType());

        // check keys
        assertArrayEquals(new int[] {1, 2}, reducer.getKeyColumns(0));

        // parallelism was not configured on the operator
        assertTrue(reducer.getParallelism() == 1 || reducer.getParallelism() == -1);

        assertTrue(reducer.getInput() instanceof GenericDataSourceBase<?, ?>);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test caused an error: " + e.getMessage());
    }
}
Example #24
Source File: StringValueUtilsTest.java From flink with Apache License 2.0
@Test
public void testTokenizerOnStringWithoutNextToken() {
    StringValue testString = new StringValue("test");
    StringValueUtils.WhitespaceTokenizer tokenizer = new WhitespaceTokenizer();
    tokenizer.setStringToTokenize(testString);

    // consume the first and only token
    tokenizer.next(testString);

    // no further token exists
    assertFalse(tokenizer.next(testString));
}
Example #25
Source File: PairGenerator.java From flink with Apache License 2.0
public PairGenerator(long seed, int keyMax, int valueLength, KeyMode keyMode, ValueMode valueMode, String constant) {
    this.seed = seed;
    this.keyMax = keyMax;
    this.valueLength = valueLength;
    this.keyMode = keyMode;
    this.valueMode = valueMode;

    this.random = new Random(seed);
    this.counter = 0;

    this.valueConstant = new StringValue();
    if (constant != null) {
        this.valueConstant.setValue(constant);
    }
}
Example #26
Source File: StringValueUtils.java From Flink-CEPplus with Apache License 2.0
/**
 * Replaces all non-word characters in a string by a given character. The only
 * characters not replaced are the characters that qualify as word characters
 * or digit characters with respect to {@link Character#isLetter(char)} or
 * {@link Character#isDigit(char)}, as well as the underscore character.
 *
 * <p>This operation is intended to simplify strings for counting distinct words.
 *
 * @param string The string value to have the non-word characters replaced.
 * @param replacement The character to use as the replacement.
 */
public static void replaceNonWordChars(StringValue string, char replacement) {
    final char[] chars = string.getCharArray();
    final int len = string.length();

    for (int i = 0; i < len; i++) {
        final char c = chars[i];
        if (!(Character.isLetter(c) || Character.isDigit(c) || c == '_')) {
            chars[i] = replacement;
        }
    }
}
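A short usage sketch: punctuation and whitespace collapse to the replacement character, while letters, digits, and underscores survive (the input string is illustrative):

StringValue s = new StringValue("state-of-the-art, really?");
StringValueUtils.replaceNonWordChars(s, '_');

System.out.println(s.getValue()); // prints "state_of_the_art__really_"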
Example #27
Source File: DriverTestData.java From flink with Apache License 2.0
public static List<Tuple2<StringValue, IntValue>> createReduceMutableDataGroupedResult() {
    List<Tuple2<StringValue, IntValue>> data = new ArrayList<Tuple2<StringValue, IntValue>>();

    data.add(new Tuple2<StringValue, IntValue>(new StringValue("a"), new IntValue(1)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("b"), new IntValue(2)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("c"), new IntValue(3)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("dd"), new IntValue(9)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("eee"), new IntValue(21)));
    data.add(new Tuple2<StringValue, IntValue>(new StringValue("ffff"), new IntValue(42)));

    return data;
}
Example #28
Source File: StringSerializationTest.java From Flink-CEPplus with Apache License 2.0
public static final void testCopy(String[] values) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
    DataOutputStream serializer = new DataOutputStream(baos);

    for (String value : values) {
        StringValue.writeString(value, serializer);
    }

    serializer.close();
    baos.close();

    ByteArrayInputStream sourceInput = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream source = new DataInputStream(sourceInput);
    ByteArrayOutputStream targetOutput = new ByteArrayOutputStream(4096);
    DataOutputStream target = new DataOutputStream(targetOutput);

    for (int i = 0; i < values.length; i++) {
        StringValue.copyString(source, target);
    }

    ByteArrayInputStream validateInput = new ByteArrayInputStream(targetOutput.toByteArray());
    DataInputStream validate = new DataInputStream(validateInput);

    int num = 0;
    while (validate.available() > 0) {
        String deser = StringValue.readString(validate);

        assertEquals("DeserializedString differs from original string.", values[num], deser);
        num++;
    }

    assertEquals("Wrong number of deserialized values", values.length, num);
}
Example #29
Source File: CSVReaderTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testWithValueType() throws Exception {
    CsvReader reader = getCsvReader();
    DataSource<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> items =
        reader.types(StringValue.class, BooleanValue.class, ByteValue.class, ShortValue.class, IntValue.class, LongValue.class, FloatValue.class, DoubleValue.class);
    TypeInformation<?> info = items.getType();

    Assert.assertTrue(info.isTupleType());
    Assert.assertEquals(Tuple8.class, info.getTypeClass());
}
Example #30
Source File: GraphKeyTypeTransform.java From flink with Apache License 2.0
@Override
public LongValueWithProperHashCode translate(StringValue value, LongValueWithProperHashCode reuse) throws Exception {
    if (reuse == null) {
        reuse = new LongValueWithProperHashCode();
    }

    reuse.setValue(Long.parseLong(value.getValue()));

    return reuse;
}