Java Code Examples for org.apache.flink.table.types.logical.VarCharType#MAX_LENGTH
The following examples show how to use
org.apache.flink.table.types.logical.VarCharType#MAX_LENGTH.
All snippets are taken from the Apache Flink project; the source file noted above each example indicates where it originates.
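Before the examples, here is a minimal, self-contained sketch (not taken from the Flink sources; the class name VarCharTypeMaxLengthDemo is made up for illustration) of what VarCharType.MAX_LENGTH represents: it is the largest allowed VARCHAR length, Integer.MAX_VALUE, and constructing a VarCharType with it yields the unbounded VARCHAR that the SQL STRING keyword maps to, as the LogicalTypeParser examples below confirm.

import org.apache.flink.table.types.logical.VarCharType;

public class VarCharTypeMaxLengthDemo {
    public static void main(String[] args) {
        // MAX_LENGTH is Integer.MAX_VALUE, so this is an unbounded VARCHAR,
        // which is what STRING is parsed into (see the LogicalTypeParser examples).
        VarCharType unbounded = new VarCharType(VarCharType.MAX_LENGTH);
        System.out.println(unbounded.getLength() == Integer.MAX_VALUE); // true

        // A bounded VARCHAR(32) for comparison.
        VarCharType bounded = new VarCharType(32);
        System.out.println(bounded.asSerializableString()); // VARCHAR(32)
    }
}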
Example 1
Source File: BytesHashMapTest.java From flink with Apache License 2.0
public BytesHashMapTest() {
    this.keyTypes = new LogicalType[] {
        new IntType(), new VarCharType(VarCharType.MAX_LENGTH), new DoubleType(),
        new BigIntType(), new BooleanType(), new FloatType(), new SmallIntType()
    };
    this.valueTypes = new LogicalType[] {
        new DoubleType(), new BigIntType(), new BooleanType(), new FloatType(), new SmallIntType()
    };
    this.keySerializer = new BinaryRowDataSerializer(keyTypes.length);
    this.valueSerializer = new BinaryRowDataSerializer(valueTypes.length);
    this.defaultValue = valueSerializer.createInstance();
    int valueSize = defaultValue.getFixedLengthPartSize();
    this.defaultValue.pointTo(MemorySegmentFactory.wrap(new byte[valueSize]), 0, valueSize);
}
Example 2
Source File: BytesHashMapTest.java From flink with Apache License 2.0
public BytesHashMapTest() {
    this.keyTypes = new LogicalType[] {
        new IntType(), new VarCharType(VarCharType.MAX_LENGTH), new DoubleType(),
        new BigIntType(), new BooleanType(), new FloatType(), new SmallIntType()
    };
    this.valueTypes = new LogicalType[] {
        new DoubleType(), new BigIntType(), new BooleanType(), new FloatType(), new SmallIntType()
    };
    this.keySerializer = new BinaryRowSerializer(keyTypes.length);
    this.valueSerializer = new BinaryRowSerializer(valueTypes.length);
    this.defaultValue = valueSerializer.createInstance();
    int valueSize = defaultValue.getFixedLengthPartSize();
    this.defaultValue.pointTo(MemorySegmentFactory.wrap(new byte[valueSize]), 0, valueSize);
}
Example 3
Source File: BinaryRowTypeInfoTest.java From flink with Apache License 2.0
@Test
public void testBinaryRowTypeInfoInequality() {
    BaseRowTypeInfo typeInfo1 = new BaseRowTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
    BaseRowTypeInfo typeInfo2 = new BaseRowTypeInfo(new IntType(), new BooleanType());

    assertNotEquals(typeInfo1, typeInfo2);
    assertNotEquals(typeInfo1.hashCode(), typeInfo2.hashCode());
}
Example 4
Source File: RowDataSerializerTest.java From flink with Apache License 2.0
private static Object[] testLargeRowDataSerializer() {
    RowDataTypeInfo typeInfo = new RowDataTypeInfo(
        new IntType(), new IntType(), new IntType(), new IntType(), new IntType(), new IntType(),
        new IntType(), new IntType(), new IntType(), new IntType(), new IntType(), new IntType(),
        new VarCharType(VarCharType.MAX_LENGTH));

    GenericRowData row = new GenericRowData(13);
    row.setField(0, 2);
    row.setField(1, null);
    row.setField(3, null);
    row.setField(4, null);
    row.setField(5, null);
    row.setField(6, null);
    row.setField(7, null);
    row.setField(8, null);
    row.setField(9, null);
    row.setField(10, null);
    row.setField(11, null);
    row.setField(12, fromString("Test"));

    RowDataSerializer serializer = typeInfo.createSerializer(new ExecutionConfig());
    return new Object[] {serializer, new RowData[]{row}};
}
Example 5
Source File: RowDataSerializerTest.java From flink with Apache License 2.0
private static Object[] testRowDataSerializer() {
    RowDataTypeInfo typeInfo = new RowDataTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));

    GenericRowData row1 = new GenericRowData(2);
    row1.setField(0, 1);
    row1.setField(1, fromString("a"));

    GenericRowData row2 = new GenericRowData(2);
    row2.setField(0, 2);
    row2.setField(1, null);

    RowDataSerializer serializer = typeInfo.createSerializer(new ExecutionConfig());
    return new Object[] {serializer, new RowData[]{row1, row2}};
}
Example 6
Source File: RowDataTypeInfoTest.java From flink with Apache License 2.0
@Test
public void testBinaryRowTypeInfoInequality() {
    RowDataTypeInfo typeInfo1 = new RowDataTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
    RowDataTypeInfo typeInfo2 = new RowDataTypeInfo(new IntType(), new BooleanType());

    assertNotEquals(typeInfo1, typeInfo2);
    assertNotEquals(typeInfo1.hashCode(), typeInfo2.hashCode());
}
Example 7
Source File: RowDataTypeInfoTest.java From flink with Apache License 2.0
@Test
public void testBinaryRowTypeInfoEquality() {
    RowDataTypeInfo typeInfo1 = new RowDataTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
    RowDataTypeInfo typeInfo2 = new RowDataTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));

    assertEquals(typeInfo1, typeInfo2);
    assertEquals(typeInfo1.hashCode(), typeInfo2.hashCode());
}
Example 8
Source File: BaseRowSerializerTest.java From flink with Apache License 2.0
private static Object[] testBaseRowSerializerWithComplexTypes() {
    BaseRowTypeInfo typeInfo = new BaseRowTypeInfo(
        new IntType(),
        new DoubleType(),
        new VarCharType(VarCharType.MAX_LENGTH),
        new ArrayType(new IntType()),
        new MapType(new IntType(), new IntType()));

    GenericRow[] data = new GenericRow[]{
        createRow(null, null, null, null, null),
        createRow(0, null, null, null, null),
        createRow(0, 0.0, null, null, null),
        createRow(0, 0.0, fromString("a"), null, null),
        createRow(1, 0.0, fromString("a"), null, null),
        createRow(1, 1.0, fromString("a"), null, null),
        createRow(1, 1.0, fromString("b"), null, null),
        createRow(1, 1.0, fromString("b"), createArray(1), createMap(new int[]{1}, new int[]{1})),
        createRow(1, 1.0, fromString("b"), createArray(1, 2), createMap(new int[]{1, 4}, new int[]{1, 2})),
        createRow(1, 1.0, fromString("b"), createArray(1, 2, 3), createMap(new int[]{1, 5}, new int[]{1, 3})),
        createRow(1, 1.0, fromString("b"), createArray(1, 2, 3, 4), createMap(new int[]{1, 6}, new int[]{1, 4})),
        createRow(1, 1.0, fromString("b"), createArray(1, 2, 3, 4, 5), createMap(new int[]{1, 7}, new int[]{1, 5})),
        createRow(1, 1.0, fromString("b"), createArray(1, 2, 3, 4, 5, 6), createMap(new int[]{1, 8}, new int[]{1, 6}))
    };

    BaseRowSerializer serializer = typeInfo.createSerializer(new ExecutionConfig());
    return new Object[] {serializer, data};
}
Example 9
Source File: BaseRowSerializerTest.java From flink with Apache License 2.0
private static Object[] testLargeBaseRowSerializer() {
    BaseRowTypeInfo typeInfo = new BaseRowTypeInfo(
        new IntType(), new IntType(), new IntType(), new IntType(), new IntType(), new IntType(),
        new IntType(), new IntType(), new IntType(), new IntType(), new IntType(), new IntType(),
        new VarCharType(VarCharType.MAX_LENGTH));

    GenericRow row = new GenericRow(13);
    row.setField(0, 2);
    row.setField(1, null);
    row.setField(3, null);
    row.setField(4, null);
    row.setField(5, null);
    row.setField(6, null);
    row.setField(7, null);
    row.setField(8, null);
    row.setField(9, null);
    row.setField(10, null);
    row.setField(11, null);
    row.setField(12, fromString("Test"));

    BaseRowSerializer serializer = typeInfo.createSerializer(new ExecutionConfig());
    return new Object[] {serializer, new BaseRow[]{row}};
}
Example 10
Source File: BaseRowSerializerTest.java From flink with Apache License 2.0
private static Object[] testBaseRowSerializer() {
    BaseRowTypeInfo typeInfo = new BaseRowTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));

    GenericRow row1 = new GenericRow(2);
    row1.setField(0, 1);
    row1.setField(1, fromString("a"));

    GenericRow row2 = new GenericRow(2);
    row2.setField(0, 2);
    row2.setField(1, null);

    BaseRowSerializer serializer = typeInfo.createSerializer(new ExecutionConfig());
    return new Object[] {serializer, new BaseRow[]{row1, row2}};
}
Example 11
Source File: BinaryRowTypeInfoTest.java From flink with Apache License 2.0
@Test
public void testBinaryRowTypeInfoEquality() {
    BaseRowTypeInfo typeInfo1 = new BaseRowTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));
    BaseRowTypeInfo typeInfo2 = new BaseRowTypeInfo(new IntType(), new VarCharType(VarCharType.MAX_LENGTH));

    assertEquals(typeInfo1, typeInfo2);
    assertEquals(typeInfo1.hashCode(), typeInfo2.hashCode());
}
Example 12
Source File: RowTimeSortOperatorTest.java From flink with Apache License 2.0
@Test
public void testSortOnTwoFields() throws Exception {
    BaseRowTypeInfo inputRowType = new BaseRowTypeInfo(
        new IntType(), new BigIntType(), new VarCharType(VarCharType.MAX_LENGTH), new IntType());

    // Note: RowTimeIdx must be 0 in product environment, the value is 1 here just for simplify the testing
    int rowTimeIdx = 1;
    GeneratedRecordComparator gComparator = new GeneratedRecordComparator("", "", new Object[0]) {
        private static final long serialVersionUID = -6067266199060901331L;

        @Override
        public RecordComparator newInstance(ClassLoader classLoader) {
            return IntRecordComparator.INSTANCE;
        }
    };

    BaseRowHarnessAssertor assertor = new BaseRowHarnessAssertor(inputRowType.getFieldTypes());
    RowTimeSortOperator operator = createSortOperator(inputRowType, rowTimeIdx, gComparator);
    OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(operator);
    testHarness.open();
    testHarness.processElement(record(3, 3L, "Hello world", 3));
    testHarness.processElement(record(2, 2L, "Hello", 2));
    testHarness.processElement(record(6, 2L, "Luke Skywalker", 6));
    testHarness.processElement(record(5, 3L, "I am fine.", 5));
    testHarness.processElement(record(7, 1L, "Comment#1", 7));
    testHarness.processElement(record(9, 4L, "Comment#3", 9));
    testHarness.processElement(record(10, 4L, "Comment#4", 10));
    testHarness.processElement(record(8, 4L, "Comment#2", 8));
    testHarness.processElement(record(1, 1L, "Hi", 2));
    testHarness.processElement(record(1, 1L, "Hi", 1));
    testHarness.processElement(record(4, 3L, "Helloworld, how are you?", 4));
    testHarness.processElement(record(4, 5L, "Hello, how are you?", 4));
    testHarness.processWatermark(new Watermark(4L));

    List<Object> expectedOutput = new ArrayList<>();
    expectedOutput.add(record(1, 1L, "Hi", 2));
    expectedOutput.add(record(1, 1L, "Hi", 1));
    expectedOutput.add(record(7, 1L, "Comment#1", 7));
    expectedOutput.add(record(2, 2L, "Hello", 2));
    expectedOutput.add(record(6, 2L, "Luke Skywalker", 6));
    expectedOutput.add(record(3, 3L, "Hello world", 3));
    expectedOutput.add(record(4, 3L, "Helloworld, how are you?", 4));
    expectedOutput.add(record(5, 3L, "I am fine.", 5));
    expectedOutput.add(record(8, 4L, "Comment#2", 8));
    expectedOutput.add(record(9, 4L, "Comment#3", 9));
    expectedOutput.add(record(10, 4L, "Comment#4", 10));
    expectedOutput.add(new Watermark(4L));

    // do a snapshot, data could be recovered from state
    OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
    testHarness.close();
    expectedOutput.clear();

    operator = createSortOperator(inputRowType, rowTimeIdx, gComparator);
    testHarness = createTestHarness(operator);
    testHarness.initializeState(snapshot);
    testHarness.open();
    // late data will be dropped
    testHarness.processElement(record(5, 3L, "I am fine.", 6));
    testHarness.processWatermark(new Watermark(5L));
    expectedOutput.add(record(4, 5L, "Hello, how are you?", 4));
    expectedOutput.add(new Watermark(5L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());

    // those watermark has no effect
    testHarness.processWatermark(new Watermark(11L));
    testHarness.processWatermark(new Watermark(12L));
    expectedOutput.add(new Watermark(11L));
    expectedOutput.add(new Watermark(12L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
}
Example 13
Source File: OrcSplitReaderUtil.java From flink with Apache License 2.0
/**
 * See {@code org.apache.flink.table.catalog.hive.util.HiveTypeUtil}.
 */
public static TypeDescription logicalTypeToOrcType(LogicalType type) {
    type = type.copy(true);
    switch (type.getTypeRoot()) {
        case CHAR:
            return TypeDescription.createChar().withMaxLength(((CharType) type).getLength());
        case VARCHAR:
            int len = ((VarCharType) type).getLength();
            if (len == VarCharType.MAX_LENGTH) {
                return TypeDescription.createString();
            } else {
                return TypeDescription.createVarchar().withMaxLength(len);
            }
        case BOOLEAN:
            return TypeDescription.createBoolean();
        case VARBINARY:
            if (type.equals(DataTypes.BYTES().getLogicalType())) {
                return TypeDescription.createBinary();
            } else {
                throw new UnsupportedOperationException("Not support other binary type: " + type);
            }
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            return TypeDescription.createDecimal()
                .withScale(decimalType.getScale())
                .withPrecision(decimalType.getPrecision());
        case TINYINT:
            return TypeDescription.createByte();
        case SMALLINT:
            return TypeDescription.createShort();
        case INTEGER:
            return TypeDescription.createInt();
        case BIGINT:
            return TypeDescription.createLong();
        case FLOAT:
            return TypeDescription.createFloat();
        case DOUBLE:
            return TypeDescription.createDouble();
        case DATE:
            return TypeDescription.createDate();
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return TypeDescription.createTimestamp();
        case ARRAY:
            ArrayType arrayType = (ArrayType) type;
            return TypeDescription.createList(logicalTypeToOrcType(arrayType.getElementType()));
        case MAP:
            MapType mapType = (MapType) type;
            return TypeDescription.createMap(
                logicalTypeToOrcType(mapType.getKeyType()),
                logicalTypeToOrcType(mapType.getValueType()));
        case ROW:
            RowType rowType = (RowType) type;
            TypeDescription struct = TypeDescription.createStruct();
            for (int i = 0; i < rowType.getFieldCount(); i++) {
                struct.addField(
                    rowType.getFieldNames().get(i),
                    logicalTypeToOrcType(rowType.getChildren().get(i)));
            }
            return struct;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
Example 14
Source File: LogicalTypeParser.java From flink with Apache License 2.0
private LogicalType parseTypeByKeyword() {
    nextToken(TokenType.KEYWORD);
    switch (tokenAsKeyword()) {
        case CHAR: return parseCharType();
        case VARCHAR: return parseVarCharType();
        case STRING: return new VarCharType(VarCharType.MAX_LENGTH);
        case BOOLEAN: return new BooleanType();
        case BINARY: return parseBinaryType();
        case VARBINARY: return parseVarBinaryType();
        case BYTES: return new VarBinaryType(VarBinaryType.MAX_LENGTH);
        case DECIMAL:
        case NUMERIC:
        case DEC: return parseDecimalType();
        case TINYINT: return new TinyIntType();
        case SMALLINT: return new SmallIntType();
        case INT:
        case INTEGER: return new IntType();
        case BIGINT: return new BigIntType();
        case FLOAT: return new FloatType();
        case DOUBLE: return parseDoubleType();
        case DATE: return new DateType();
        case TIME: return parseTimeType();
        case TIMESTAMP: return parseTimestampType();
        case INTERVAL: return parseIntervalType();
        case ARRAY: return parseArrayType();
        case MULTISET: return parseMultisetType();
        case MAP: return parseMapType();
        case ROW: return parseRowType();
        case NULL: return new NullType();
        case ANY: return parseAnyType();
        default: throw parsingError("Unsupported type: " + token().value);
    }
}
Example 15
Source File: WindowOperatorTest.java From flink with Apache License 2.0
@Test
public void testProcessingTimeSessionWindows() throws Throwable {
    closeCalled.set(0);

    WindowOperator operator = WindowOperatorBuilder
        .builder()
        .withInputFields(inputFieldTypes)
        .session(Duration.ofSeconds(3))
        .withProcessingTime()
        .aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

    OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(operator);
    BaseRowHarnessAssertor assertor = new BaseRowHarnessAssertor(
        outputType.getFieldTypes(),
        new GenericRowRecordSortComparator(0, new VarCharType(VarCharType.MAX_LENGTH)));

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();

    // timestamp is ignored in processing time
    testHarness.setProcessingTime(3);
    testHarness.processElement(record("key2", 1, 1L));
    testHarness.setProcessingTime(1000);
    testHarness.processElement(record("key2", 1, 1002L));
    testHarness.setProcessingTime(5000);

    expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 2L, 2L, 3L, 4000L, 3999L)));
    assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.processElement(record("key2", 1, 5000L));
    testHarness.processElement(record("key2", 1, 5000L));
    testHarness.processElement(record("key1", 1, 5000L));
    testHarness.processElement(record("key1", 1, 5000L));
    testHarness.processElement(record("key1", 1, 5000L));
    testHarness.setProcessingTime(10000);

    expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 2L, 2L, 5000L, 8000L, 7999L)));
    expectedOutput.addAll(doubleRecord(isTableAggregate, record("key1", 3L, 3L, 5000L, 8000L, 7999L)));
    assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.close();
}
Example 16
Source File: ParquetColumnarRowSplitReaderTest.java From flink with Apache License 2.0
private int readSplitAndCheck(
        int start,
        long seekToRow,
        Path testPath,
        long splitStart,
        long splitLength,
        List<Integer> values) throws IOException {
    LogicalType[] fieldTypes = new LogicalType[]{
        new VarCharType(VarCharType.MAX_LENGTH), new BooleanType(), new TinyIntType(),
        new SmallIntType(), new IntType(), new BigIntType(), new FloatType(), new DoubleType(),
        new TimestampType(9), new DecimalType(5, 0), new DecimalType(15, 0), new DecimalType(20, 0),
        new DecimalType(5, 0), new DecimalType(15, 0), new DecimalType(20, 0)};

    ParquetColumnarRowSplitReader reader = new ParquetColumnarRowSplitReader(
        false,
        true,
        new Configuration(),
        fieldTypes,
        new String[] {
            "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
            "f8", "f9", "f10", "f11", "f12", "f13", "f14"},
        VectorizedColumnBatch::new,
        500,
        new org.apache.hadoop.fs.Path(testPath.getPath()),
        splitStart,
        splitLength);
    reader.seekToRow(seekToRow);

    int i = start;
    while (!reader.reachedEnd()) {
        ColumnarRowData row = reader.nextRecord();
        Integer v = values.get(i);
        if (v == null) {
            assertTrue(row.isNullAt(0));
            assertTrue(row.isNullAt(1));
            assertTrue(row.isNullAt(2));
            assertTrue(row.isNullAt(3));
            assertTrue(row.isNullAt(4));
            assertTrue(row.isNullAt(5));
            assertTrue(row.isNullAt(6));
            assertTrue(row.isNullAt(7));
            assertTrue(row.isNullAt(8));
            assertTrue(row.isNullAt(9));
            assertTrue(row.isNullAt(10));
            assertTrue(row.isNullAt(11));
            assertTrue(row.isNullAt(12));
            assertTrue(row.isNullAt(13));
            assertTrue(row.isNullAt(14));
        } else {
            assertEquals("" + v, row.getString(0).toString());
            assertEquals(v % 2 == 0, row.getBoolean(1));
            assertEquals(v.byteValue(), row.getByte(2));
            assertEquals(v.shortValue(), row.getShort(3));
            assertEquals(v.intValue(), row.getInt(4));
            assertEquals(v.longValue(), row.getLong(5));
            assertEquals(v.floatValue(), row.getFloat(6), 0);
            assertEquals(v.doubleValue(), row.getDouble(7), 0);
            assertEquals(toDateTime(v), row.getTimestamp(8, 9).toLocalDateTime());
            assertEquals(BigDecimal.valueOf(v), row.getDecimal(9, 5, 0).toBigDecimal());
            assertEquals(BigDecimal.valueOf(v), row.getDecimal(10, 15, 0).toBigDecimal());
            assertEquals(BigDecimal.valueOf(v), row.getDecimal(11, 20, 0).toBigDecimal());
            assertEquals(BigDecimal.valueOf(v), row.getDecimal(12, 5, 0).toBigDecimal());
            assertEquals(BigDecimal.valueOf(v), row.getDecimal(13, 15, 0).toBigDecimal());
            assertEquals(BigDecimal.valueOf(v), row.getDecimal(14, 20, 0).toBigDecimal());
        }
        i++;
    }
    reader.close();
    return i - start;
}
Example 17
Source File: WindowOperatorTest.java From flink with Apache License 2.0
@Test
public void testProcessingTimeSessionWindows() throws Throwable {
    closeCalled.set(0);

    WindowOperator operator = WindowOperatorBuilder
        .builder()
        .withInputFields(inputFieldTypes)
        .session(Duration.ofSeconds(3))
        .withProcessingTime()
        .aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);
    RowDataHarnessAssertor assertor = new RowDataHarnessAssertor(
        outputType.getFieldTypes(),
        new GenericRowRecordSortComparator(0, new VarCharType(VarCharType.MAX_LENGTH)));

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();

    // timestamp is ignored in processing time
    testHarness.setProcessingTime(3);
    testHarness.processElement(insertRecord("key2", 1, 1L));
    testHarness.setProcessingTime(1000);
    testHarness.processElement(insertRecord("key2", 1, 1002L));
    testHarness.setProcessingTime(5000);

    expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 2L, 2L, 3L, 4000L, 3999L)));
    assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.processElement(insertRecord("key2", 1, 5000L));
    testHarness.processElement(insertRecord("key2", 1, 5000L));
    testHarness.processElement(insertRecord("key1", 1, 5000L));
    testHarness.processElement(insertRecord("key1", 1, 5000L));
    testHarness.processElement(insertRecord("key1", 1, 5000L));
    testHarness.setProcessingTime(10000);

    expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 2L, 2L, 5000L, 8000L, 7999L)));
    expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 3L, 3L, 5000L, 8000L, 7999L)));
    assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.close();
}
Example 18
Source File: RowTimeSortOperatorTest.java From flink with Apache License 2.0
@Test
public void testSortOnTwoFields() throws Exception {
    RowDataTypeInfo inputRowType = new RowDataTypeInfo(
        new IntType(), new BigIntType(), new VarCharType(VarCharType.MAX_LENGTH), new IntType());

    // Note: RowTimeIdx must be 0 in product environment, the value is 1 here just for simplify the testing
    int rowTimeIdx = 1;
    GeneratedRecordComparator gComparator = new GeneratedRecordComparator("", "", new Object[0]) {
        private static final long serialVersionUID = -6067266199060901331L;

        @Override
        public RecordComparator newInstance(ClassLoader classLoader) {
            return IntRecordComparator.INSTANCE;
        }
    };

    RowDataHarnessAssertor assertor = new RowDataHarnessAssertor(inputRowType.getFieldTypes());
    RowTimeSortOperator operator = createSortOperator(inputRowType, rowTimeIdx, gComparator);
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);
    testHarness.open();
    testHarness.processElement(insertRecord(3, 3L, "Hello world", 3));
    testHarness.processElement(insertRecord(2, 2L, "Hello", 2));
    testHarness.processElement(insertRecord(6, 2L, "Luke Skywalker", 6));
    testHarness.processElement(insertRecord(5, 3L, "I am fine.", 5));
    testHarness.processElement(insertRecord(7, 1L, "Comment#1", 7));
    testHarness.processElement(insertRecord(9, 4L, "Comment#3", 9));
    testHarness.processElement(insertRecord(10, 4L, "Comment#4", 10));
    testHarness.processElement(insertRecord(8, 4L, "Comment#2", 8));
    testHarness.processElement(insertRecord(1, 1L, "Hi", 2));
    testHarness.processElement(insertRecord(1, 1L, "Hi", 1));
    testHarness.processElement(insertRecord(4, 3L, "Helloworld, how are you?", 4));
    testHarness.processElement(insertRecord(4, 5L, "Hello, how are you?", 4));
    testHarness.processWatermark(new Watermark(4L));

    List<Object> expectedOutput = new ArrayList<>();
    expectedOutput.add(insertRecord(1, 1L, "Hi", 2));
    expectedOutput.add(insertRecord(1, 1L, "Hi", 1));
    expectedOutput.add(insertRecord(7, 1L, "Comment#1", 7));
    expectedOutput.add(insertRecord(2, 2L, "Hello", 2));
    expectedOutput.add(insertRecord(6, 2L, "Luke Skywalker", 6));
    expectedOutput.add(insertRecord(3, 3L, "Hello world", 3));
    expectedOutput.add(insertRecord(4, 3L, "Helloworld, how are you?", 4));
    expectedOutput.add(insertRecord(5, 3L, "I am fine.", 5));
    expectedOutput.add(insertRecord(8, 4L, "Comment#2", 8));
    expectedOutput.add(insertRecord(9, 4L, "Comment#3", 9));
    expectedOutput.add(insertRecord(10, 4L, "Comment#4", 10));
    expectedOutput.add(new Watermark(4L));

    // do a snapshot, data could be recovered from state
    OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
    testHarness.close();
    expectedOutput.clear();

    operator = createSortOperator(inputRowType, rowTimeIdx, gComparator);
    testHarness = createTestHarness(operator);
    testHarness.initializeState(snapshot);
    testHarness.open();
    // late data will be dropped
    testHarness.processElement(insertRecord(5, 3L, "I am fine.", 6));
    testHarness.processWatermark(new Watermark(5L));
    expectedOutput.add(insertRecord(4, 5L, "Hello, how are you?", 4));
    expectedOutput.add(new Watermark(5L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());

    // those watermark has no effect
    testHarness.processWatermark(new Watermark(11L));
    testHarness.processWatermark(new Watermark(12L));
    expectedOutput.add(new Watermark(11L));
    expectedOutput.add(new Watermark(12L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
}
Example 19
Source File: RowTimeSortOperatorTest.java From flink with Apache License 2.0
@Test
public void testOnlySortOnRowTime() throws Exception {
    RowDataTypeInfo inputRowType = new RowDataTypeInfo(
        new BigIntType(), new BigIntType(), new VarCharType(VarCharType.MAX_LENGTH), new IntType());
    int rowTimeIdx = 0;
    RowDataHarnessAssertor assertor = new RowDataHarnessAssertor(inputRowType.getFieldTypes());
    RowTimeSortOperator operator = createSortOperator(inputRowType, rowTimeIdx, null);
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);
    testHarness.open();
    testHarness.processElement(insertRecord(3L, 2L, "Hello world", 3));
    testHarness.processElement(insertRecord(2L, 2L, "Hello", 2));
    testHarness.processElement(insertRecord(6L, 3L, "Luke Skywalker", 6));
    testHarness.processElement(insertRecord(5L, 3L, "I am fine.", 5));
    testHarness.processElement(insertRecord(7L, 4L, "Comment#1", 7));
    testHarness.processElement(insertRecord(9L, 4L, "Comment#3", 9));
    testHarness.processElement(insertRecord(10L, 4L, "Comment#4", 10));
    testHarness.processElement(insertRecord(8L, 4L, "Comment#2", 8));
    testHarness.processElement(insertRecord(1L, 1L, "Hi", 2));
    testHarness.processElement(insertRecord(1L, 1L, "Hi", 1));
    testHarness.processElement(insertRecord(4L, 3L, "Helloworld, how are you?", 4));
    testHarness.processWatermark(new Watermark(9L));

    List<Object> expectedOutput = new ArrayList<>();
    expectedOutput.add(insertRecord(1L, 1L, "Hi", 2));
    expectedOutput.add(insertRecord(1L, 1L, "Hi", 1));
    expectedOutput.add(insertRecord(2L, 2L, "Hello", 2));
    expectedOutput.add(insertRecord(3L, 2L, "Hello world", 3));
    expectedOutput.add(insertRecord(4L, 3L, "Helloworld, how are you?", 4));
    expectedOutput.add(insertRecord(5L, 3L, "I am fine.", 5));
    expectedOutput.add(insertRecord(6L, 3L, "Luke Skywalker", 6));
    expectedOutput.add(insertRecord(7L, 4L, "Comment#1", 7));
    expectedOutput.add(insertRecord(8L, 4L, "Comment#2", 8));
    expectedOutput.add(insertRecord(9L, 4L, "Comment#3", 9));
    expectedOutput.add(new Watermark(9L));

    // do a snapshot, data could be recovered from state
    OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
    testHarness.close();
    expectedOutput.clear();

    operator = createSortOperator(inputRowType, rowTimeIdx, null);
    testHarness = createTestHarness(operator);
    testHarness.initializeState(snapshot);
    testHarness.open();
    // late data will be dropped
    testHarness.processElement(insertRecord(5L, 3L, "I am fine.", 6));
    testHarness.processWatermark(new Watermark(10L));
    expectedOutput.add(insertRecord(10L, 4L, "Comment#4", 10));
    expectedOutput.add(new Watermark(10L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());

    // those watermark has no effect
    testHarness.processWatermark(new Watermark(11L));
    testHarness.processWatermark(new Watermark(12L));
    expectedOutput.add(new Watermark(11L));
    expectedOutput.add(new Watermark(12L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
}
Example 20
Source File: LogicalTypeParser.java From flink with Apache License 2.0
private LogicalType parseTypeByKeyword() {
    nextToken(TokenType.KEYWORD);
    switch (tokenAsKeyword()) {
        case CHAR: return parseCharType();
        case VARCHAR: return parseVarCharType();
        case STRING: return new VarCharType(VarCharType.MAX_LENGTH);
        case BOOLEAN: return new BooleanType();
        case BINARY: return parseBinaryType();
        case VARBINARY: return parseVarBinaryType();
        case BYTES: return new VarBinaryType(VarBinaryType.MAX_LENGTH);
        case DECIMAL:
        case NUMERIC:
        case DEC: return parseDecimalType();
        case TINYINT: return new TinyIntType();
        case SMALLINT: return new SmallIntType();
        case INT:
        case INTEGER: return new IntType();
        case BIGINT: return new BigIntType();
        case FLOAT: return new FloatType();
        case DOUBLE: return parseDoubleType();
        case DATE: return new DateType();
        case TIME: return parseTimeType();
        case TIMESTAMP: return parseTimestampType();
        case INTERVAL: return parseIntervalType();
        case ARRAY: return parseArrayType();
        case MULTISET: return parseMultisetType();
        case MAP: return parseMapType();
        case ROW: return parseRowType();
        case NULL: return new NullType();
        case RAW: return parseRawType();
        case LEGACY: return parseLegacyType();
        default: throw parsingError("Unsupported type: " + token().value);
    }
}