Java Code Examples for org.apache.flink.api.common.typeinfo.Types#INT
The following examples show how to use org.apache.flink.api.common.typeinfo.Types#INT.
Each example is taken from an open-source project; the source file and project are noted above each example.
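Before the examples, a minimal sketch of the API itself may help. Types.INT is a ready-made TypeInformation&lt;Integer&gt; constant that is passed wherever Flink needs an explicit type hint instead of reflection-based type extraction. The sketch below is not taken from any of the projects referenced here; the class name TypesIntDemo and the state/field names are invented for illustration.

import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;

// Minimal sketch: the two most common roles Types.INT plays in the examples below.
public class TypesIntDemo {
    public static void main(String[] args) {
        // Types.INT is simply a TypeInformation<Integer> constant.
        TypeInformation<Integer> intInfo = Types.INT;

        // Role 1: declaring keyed state with an explicit type
        // (as the KeyedStateInputFormatTest examples do).
        ValueStateDescriptor<Integer> counter = new ValueStateDescriptor<>("counter", Types.INT);

        // Role 2: describing a Row schema column by column
        // (as the table/schema examples do).
        RowTypeInfo rowInfo = new RowTypeInfo(
            new TypeInformation[]{Types.INT, Types.STRING},
            new String[]{"id", "name"});

        System.out.println(intInfo);
        System.out.println(counter.getName());
        System.out.println(rowInfo);
    }
}

The examples that follow show these same patterns, state descriptors and row schemas, in real projects.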
Example 1
Source File: KeyedStateInputFormatTest.java From flink with Apache License 2.0
@Test
public void testReadState() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state = createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));
    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?, ?> format = new KeyedStateInputFormat<>(
        operatorState,
        new MemoryStateBackend(),
        new Configuration(),
        new KeyedStateReaderOperator<>(new ReaderFunction(), Types.INT));
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

    KeyedStateReaderFunction<Integer, Integer> userFunction = new ReaderFunction();

    List<Integer> data = readInputSplit(split, userFunction);

    Assert.assertEquals("Incorrect data read from input split", Arrays.asList(1, 2, 3), data);
}
Example 2
Source File: KeyedStateInputFormatTest.java From flink with Apache License 2.0
@Test
public void testReadTime() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state = createOperatorSubtaskState(new KeyedProcessOperator<>(new StatefulFunctionWithTime()));
    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?> format = new KeyedStateInputFormat<>(
        operatorState,
        new MemoryStateBackend(),
        Types.INT,
        new TimeReaderFunction());
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

    KeyedStateReaderFunction<Integer, Integer> userFunction = new TimeReaderFunction();

    List<Integer> data = readInputSplit(split, userFunction);

    Assert.assertEquals("Incorrect data read from input split", Arrays.asList(1, 1, 2, 2, 3, 3), data);
}
Example 3
Source File: StandardScalerTest.java From Alink with Apache License 2.0
public static AlgoOperator getMultiTypeData(boolean isBatch) {
    Row[] testArray = new Row[]{
        Row.of(new Object[]{"0", "a", 1L, 1, 0.2, true}),
        Row.of(new Object[]{"1", null, 2L, 2, null, true}),
        Row.of(new Object[]{"2", "c", null, null, null, false}),
        Row.of(new Object[]{"3", "a", 0L, 0, null, null}),
    };

    String[] colNames = new String[]{"id", "f_string", "f_long", "f_int", "f_double", "f_boolean"};
    TypeInformation[] colTypes = new TypeInformation[]{Types.STRING, Types.STRING, Types.LONG, Types.INT, Types.DOUBLE, Types.BOOLEAN};

    TableSchema schema = new TableSchema(colNames, colTypes);

    if (isBatch) {
        return new MemSourceBatchOp(Arrays.asList(testArray), schema);
    } else {
        return new MemSourceStreamOp(Arrays.asList(testArray), schema);
    }
}
Example 4
Source File: OuterJoinRecordStateViews.java From flink with Apache License 2.0
private InputSideHasUniqueKey(
        RuntimeContext ctx,
        String stateName,
        BaseRowTypeInfo recordType,
        BaseRowTypeInfo uniqueKeyType,
        KeySelector<BaseRow, BaseRow> uniqueKeySelector,
        StateTtlConfig ttlConfig) {
    checkNotNull(uniqueKeyType);
    checkNotNull(uniqueKeySelector);
    TupleTypeInfo<Tuple2<BaseRow, Integer>> valueTypeInfo = new TupleTypeInfo<>(recordType, Types.INT);
    MapStateDescriptor<BaseRow, Tuple2<BaseRow, Integer>> recordStateDesc = new MapStateDescriptor<>(
        stateName,
        uniqueKeyType,
        valueTypeInfo);
    if (!ttlConfig.equals(StateTtlConfig.DISABLED)) {
        recordStateDesc.enableTimeToLive(ttlConfig);
    }
    this.recordState = ctx.getMapState(recordStateDesc);
    this.uniqueKeySelector = uniqueKeySelector;
}
Example 5
Source File: KeyedStateInputFormatTest.java From flink with Apache License 2.0
@Nonnull
private List<Integer> readInputSplit(KeyGroupRangeInputSplit split, KeyedStateReaderFunction<Integer, Integer> userFunction) throws IOException {
    KeyedStateInputFormat<Integer, Integer> format = new KeyedStateInputFormat<>(
        new OperatorState(OperatorIDGenerator.fromUid("uid"), 1, 4),
        new MemoryStateBackend(),
        Types.INT,
        userFunction);

    List<Integer> data = new ArrayList<>();

    format.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0));

    format.openInputFormat();
    format.open(split);

    while (!format.reachedEnd()) {
        data.add(format.nextRecord(0));
    }

    format.close();
    format.closeInputFormat();

    data.sort(Comparator.comparingInt(id -> id));
    return data;
}
Example 6
Source File: TableUtilTest.java From Alink with Apache License 2.0
@Test
public void getCategoricalColsTest() {
    TableSchema tableSchema = new TableSchema(
        new String[]{"f0", "f1", "f2", "f3"},
        new TypeInformation[]{Types.INT, Types.LONG, Types.STRING, Types.BOOLEAN});

    Assert.assertArrayEquals(
        TableUtil.getCategoricalCols(tableSchema, tableSchema.getFieldNames(), null),
        new String[]{"f2", "f3"});
    Assert.assertArrayEquals(
        TableUtil.getCategoricalCols(tableSchema, new String[]{"f2", "f1", "f0", "f3"}, new String[]{"f0"}),
        new String[]{"f2", "f0", "f3"});

    thrown.expect(IllegalArgumentException.class);
    Assert.assertArrayEquals(
        TableUtil.getCategoricalCols(tableSchema, new String[]{"f3", "f0"}, new String[]{"f2"}),
        new String[]{"f3", "f2"});
}
Example 7
Source File: MinByOperatorTest.java From flink with Apache License 2.0
/**
 * Validates that no ClassCastException happens; should not fail, e.g. like in FLINK-8255.
 */
@Test(expected = InvalidProgramException.class)
public void testMinByRowTypeInfoKeyFieldsDataset() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    TypeInformation[] types = new TypeInformation[]{Types.INT, Types.INT};

    String[] fieldNames = new String[]{"id", "value"};
    RowTypeInfo rowTypeInfo = new RowTypeInfo(types, fieldNames);
    DataSet tupleDs = env.fromCollection(Collections.singleton(new Row(2)), rowTypeInfo);

    tupleDs.minBy(0);
}
Example 8
Source File: LocalExecutorITCase.java From flink with Apache License 2.0
@Test
public void testTableSchema() throws Exception {
    final Executor executor = createDefaultExecutor(clusterClient);
    final SessionContext session = new SessionContext("test-session", new Environment());

    final TableSchema actualTableSchema = executor.getTableSchema(session, "TableNumber2");

    final TableSchema expectedTableSchema = new TableSchema(
        new String[]{"IntegerField2", "StringField2"},
        new TypeInformation[]{Types.INT, Types.STRING});

    assertEquals(expectedTableSchema, actualTableSchema);
}
Example 9
Source File: JavaUserDefinedAggFunctions.java From flink with Apache License 2.0
@Override
public CountDistinctAccum createAccumulator() {
    CountDistinctAccum accum = new CountDistinctAccum();
    accum.map = new MapView<>(Types.STRING, Types.INT);
    accum.count = 0L;
    return accum;
}
Example 10
Source File: OuterJoinRecordStateViews.java From flink with Apache License 2.0
private InputSideHasNoUniqueKey(
        RuntimeContext ctx,
        String stateName,
        BaseRowTypeInfo recordType,
        StateTtlConfig ttlConfig) {
    TupleTypeInfo<Tuple2<Integer, Integer>> tupleTypeInfo = new TupleTypeInfo<>(Types.INT, Types.INT);
    MapStateDescriptor<BaseRow, Tuple2<Integer, Integer>> recordStateDesc = new MapStateDescriptor<>(
        stateName,
        recordType,
        tupleTypeInfo);
    if (!ttlConfig.equals(StateTtlConfig.DISABLED)) {
        recordStateDesc.enableTimeToLive(ttlConfig);
    }
    this.recordState = ctx.getMapState(recordStateDesc);
}
Example 11
Source File: KeyedStateInputFormatTest.java From flink with Apache License 2.0
@Test
public void testMaxParallelismRespected() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state = createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));
    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?, ?> format = new KeyedStateInputFormat<>(
        operatorState,
        new MemoryStateBackend(),
        new Configuration(),
        new KeyedStateReaderOperator<>(new ReaderFunction(), Types.INT));

    KeyGroupRangeInputSplit[] splits = format.createInputSplits(129);
    Assert.assertEquals("Failed to properly partition operator state into input splits", 128, splits.length);
}
Example 12
Source File: OuterJoinRecordStateViews.java From flink with Apache License 2.0
private JoinKeyContainsUniqueKey(RuntimeContext ctx, String stateName, RowDataTypeInfo recordType, StateTtlConfig ttlConfig) {
    TupleTypeInfo<Tuple2<RowData, Integer>> valueTypeInfo = new TupleTypeInfo<>(recordType, Types.INT);
    ValueStateDescriptor<Tuple2<RowData, Integer>> recordStateDesc = new ValueStateDescriptor<>(
        stateName,
        valueTypeInfo);
    if (ttlConfig.isEnabled()) {
        recordStateDesc.enableTimeToLive(ttlConfig);
    }
    this.recordState = ctx.getState(recordStateDesc);
    // the result never contains more than one record
    this.reusedRecordList = new ArrayList<>(1);
    this.reusedTupleList = new ArrayList<>(1);
}
Example 13
Source File: OrcTableSourceTest.java From flink with Apache License 2.0
private TypeInformation[] getNestedFieldTypes() {
    return new TypeInformation[]{
        Types.BOOLEAN, Types.BYTE, Types.SHORT, Types.INT, Types.LONG, Types.FLOAT, Types.DOUBLE,
        PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO,
        Types.STRING,
        Types.ROW_NAMED(
            new String[]{"list"},
            ObjectArrayTypeInfo.getInfoFor(
                Types.ROW_NAMED(
                    new String[]{"int1", "string1"},
                    Types.INT, Types.STRING
                )
            )
        ),
        ObjectArrayTypeInfo.getInfoFor(
            Types.ROW_NAMED(
                new String[]{"int1", "string1"},
                Types.INT, Types.STRING
            )
        ),
        new MapTypeInfo<>(
            Types.STRING,
            Types.ROW_NAMED(
                new String[]{"int1", "string1"},
                Types.INT, Types.STRING
            )
        )
    };
}
Example 14
Source File: KeyedStateInputFormatTest.java From flink with Apache License 2.0
private OperatorSubtaskState createOperatorSubtaskState(OneInputStreamOperator<Integer, Void> operator) throws Exception {
    try (KeyedOneInputStreamOperatorTestHarness<Integer, Integer, Void> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, id -> id, Types.INT, 128, 1, 0)) {

        testHarness.setup(VoidSerializer.INSTANCE);
        testHarness.open();

        testHarness.processElement(1, 0);
        testHarness.processElement(2, 0);
        testHarness.processElement(3, 0);

        return testHarness.snapshot(0, 0);
    }
}
Example 15
Source File: AvroSchemaConverter.java From flink with Apache License 2.0
private static TypeInformation<?> convertToTypeInfo(Schema schema) {
    switch (schema.getType()) {
        case RECORD:
            final List<Schema.Field> fields = schema.getFields();
            final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()];
            final String[] names = new String[fields.size()];
            for (int i = 0; i < fields.size(); i++) {
                final Schema.Field field = fields.get(i);
                types[i] = convertToTypeInfo(field.schema());
                names[i] = field.name();
            }
            return Types.ROW_NAMED(names, types);
        case ENUM:
            return Types.STRING;
        case ARRAY:
            // result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
            return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType()));
        case MAP:
            return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType()));
        case UNION:
            final Schema actualSchema;
            if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) {
                actualSchema = schema.getTypes().get(1);
            } else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) {
                actualSchema = schema.getTypes().get(0);
            } else if (schema.getTypes().size() == 1) {
                actualSchema = schema.getTypes().get(0);
            } else {
                // use Kryo for serialization
                return Types.GENERIC(Object.class);
            }
            return convertToTypeInfo(actualSchema);
        case FIXED:
            // logical decimal type
            if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                return Types.BIG_DEC;
            }
            // convert fixed size binary data to primitive byte arrays
            return Types.PRIMITIVE_ARRAY(Types.BYTE);
        case STRING:
            // convert Avro's Utf8/CharSequence to String
            return Types.STRING;
        case BYTES:
            // logical decimal type
            if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                return Types.BIG_DEC;
            }
            return Types.PRIMITIVE_ARRAY(Types.BYTE);
        case INT:
            // logical date and time type
            final LogicalType logicalType = schema.getLogicalType();
            if (logicalType == LogicalTypes.date()) {
                return Types.SQL_DATE;
            } else if (logicalType == LogicalTypes.timeMillis()) {
                return Types.SQL_TIME;
            }
            return Types.INT;
        case LONG:
            // logical timestamp type
            if (schema.getLogicalType() == LogicalTypes.timestampMillis()) {
                return Types.SQL_TIMESTAMP;
            }
            return Types.LONG;
        case FLOAT:
            return Types.FLOAT;
        case DOUBLE:
            return Types.DOUBLE;
        case BOOLEAN:
            return Types.BOOLEAN;
        case NULL:
            return Types.VOID;
    }
    throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'.");
}
Example 16
Source File: ParquetTableSourceTest.java From flink with Apache License 2.0
@Test
public void testFieldsFilter() throws Exception {
    ParquetTableSource parquetTableSource = createNestedTestParquetTableSource(testPath);

    // expressions for supported predicates
    Expression exp1 = new GreaterThan(
        new PlannerResolvedFieldReference("foo", Types.LONG),
        new Literal(100L, Types.LONG));
    Expression exp2 = new EqualTo(
        new Literal(100L, Types.LONG),
        new PlannerResolvedFieldReference("bar.spam", Types.LONG));

    // unsupported predicate
    Expression unsupported = new EqualTo(
        new GetCompositeField(
            new ItemAt(
                new PlannerResolvedFieldReference(
                    "nestedArray",
                    ObjectArrayTypeInfo.getInfoFor(
                        Types.ROW_NAMED(new String[]{"type", "name"}, Types.STRING, Types.STRING))),
                new Literal(1, Types.INT)),
            "type"),
        new Literal("test", Types.STRING));

    // invalid predicate
    Expression invalidPred = new EqualTo(
        new PlannerResolvedFieldReference("nonField", Types.LONG),
        // some invalid, non-serializable literal (here an object of this test class)
        new Literal(new ParquetTableSourceTest(), Types.LONG)
    );

    List<Expression> exps = new ArrayList<>();
    exps.add(exp1);
    exps.add(exp2);
    exps.add(unsupported);
    exps.add(invalidPred);

    // apply predicates on the TableSource
    ParquetTableSource filtered = (ParquetTableSource) parquetTableSource.applyPredicate(exps);

    // ensure copy is returned
    assertNotSame(parquetTableSource, filtered);
    // ensure table schema is identical
    assertEquals(parquetTableSource.getTableSchema(), filtered.getTableSchema());
    // ensure return type is identical
    assertEquals(NESTED_ROW_TYPE, filtered.getReturnType());
    // ensure source description is not the same
    assertNotEquals(parquetTableSource.explainSource(), filtered.explainSource());

    // check that pushdown was recorded
    assertTrue(filtered.isFilterPushedDown());
    assertFalse(parquetTableSource.isFilterPushedDown());

    // ensure that supported predicates were removed from the list of offered expressions
    assertEquals(2, exps.size());
    assertTrue(exps.contains(unsupported));
    assertTrue(exps.contains(invalidPred));

    // ensure ParquetInputFormat is correctly configured with the filter
    DataSet<Row> data = filtered.getDataSet(ExecutionEnvironment.createLocalEnvironment());
    InputFormat<Row, ?> inputFormat = ((DataSource<Row>) data).getInputFormat();
    assertTrue(inputFormat instanceof ParquetRowInputFormat);
    ParquetRowInputFormat parquetIF = (ParquetRowInputFormat) inputFormat;

    // expected predicate
    FilterPredicate a = FilterApi.gt(FilterApi.longColumn("foo"), 100L);
    FilterPredicate b = FilterApi.eq(FilterApi.longColumn("bar.spam"), 100L);
    FilterPredicate expected = FilterApi.and(a, b);
    // actual predicate
    FilterPredicate predicate = parquetIF.getPredicate();
    // check predicate
    assertEquals(expected, predicate);
}
Example 17
Source File: LastValueAggFunction.java From flink with Apache License 2.0
@Override
public TypeInformation<Integer> getResultType() {
    return Types.INT;
}
Example 18
Source File: OrcTableSourceTest.java From flink with Apache License 2.0
@Test @SuppressWarnings("unchecked") public void testApplyPredicate() throws Exception { OrcTableSource orc = OrcTableSource.builder() .path(getPath(TEST_FILE_NESTED)) .forOrcSchema(TEST_SCHEMA_NESTED) .build(); // expressions for supported predicates Expression pred1 = new GreaterThan( new PlannerResolvedFieldReference("int1", Types.INT), new Literal(100, Types.INT)); Expression pred2 = new EqualTo( new PlannerResolvedFieldReference("string1", Types.STRING), new Literal("hello", Types.STRING)); // invalid predicate Expression invalidPred = new EqualTo( new PlannerResolvedFieldReference("long1", Types.LONG), // some invalid, non-serializable literal (here an object of this test class) new Literal(new OrcTableSourceTest(), Types.LONG) ); ArrayList<Expression> preds = new ArrayList<>(); preds.add(pred1); preds.add(pred2); preds.add(unsupportedPred()); preds.add(invalidPred); // apply predicates on TableSource OrcTableSource projected = (OrcTableSource) orc.applyPredicate(preds); // ensure copy is returned assertTrue(orc != projected); // ensure table schema is identical assertEquals(orc.getTableSchema(), projected.getTableSchema()); // ensure return type is identical assertEquals( Types.ROW_NAMED(getNestedFieldNames(), getNestedFieldTypes()), projected.getReturnType()); // ensure IF is configured with valid/supported predicates OrcTableSource spyTS = spy(projected); OrcRowInputFormat mockIF = mock(OrcRowInputFormat.class); doReturn(mockIF).when(spyTS).buildOrcInputFormat(); ExecutionEnvironment environment = mock(ExecutionEnvironment.class); when(environment.createInput(any(InputFormat.class))).thenReturn(mock(DataSource.class)); spyTS.getDataSet(environment); ArgumentCaptor<OrcSplitReader.Predicate> arguments = ArgumentCaptor.forClass(OrcSplitReader.Predicate.class); verify(mockIF, times(2)).addPredicate(arguments.capture()); List<String> values = arguments.getAllValues().stream().map(Object::toString).collect(Collectors.toList()); assertTrue(values.contains( new OrcSplitReader.Not(new OrcSplitReader.LessThanEquals("int1", PredicateLeaf.Type.LONG, 100)).toString())); assertTrue(values.contains( new OrcSplitReader.Equals("string1", PredicateLeaf.Type.STRING, "hello").toString())); // ensure filter pushdown is correct assertTrue(spyTS.isFilterPushedDown()); assertFalse(orc.isFilterPushedDown()); }
Example 19
Source File: StatefulFunctionWithTime.java From flink-learning with Apache License 2.0
@Override
public void open(Configuration parameters) {
    ValueStateDescriptor<Integer> stateDescriptor = new ValueStateDescriptor<>("state", Types.INT);
    state = getRuntimeContext().getState(stateDescriptor);
}