org.apache.flink.api.java.typeutils.PojoTypeInfo Java Examples
The following examples show how to use org.apache.flink.api.java.typeutils.PojoTypeInfo.
They are drawn from several open-source projects; the source file and originating project are noted above each example.
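Before the examples, a quick orientation: a PojoTypeInfo is normally obtained through the TypeExtractor and exposes a POJO's fields by name and position. A minimal, self-contained sketch follows; the WordCount class here is our own illustration, not taken from any example below.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class PojoTypeInfoDemo {

    // Qualifies as a Flink POJO: public class, public no-argument
    // constructor, and fields that are public (or have getters/setters).
    public static class WordCount {
        public String word;
        public int count;
        public WordCount() {}
    }

    public static void main(String[] args) {
        TypeInformation<WordCount> typeInfo = TypeExtractor.createTypeInfo(WordCount.class);
        if (typeInfo instanceof PojoTypeInfo) {
            @SuppressWarnings("unchecked")
            PojoTypeInfo<WordCount> pojoInfo = (PojoTypeInfo<WordCount>) typeInfo;
            int pos = pojoInfo.getFieldIndex("word");    // field position by name
            System.out.println(pojoInfo.getTypeAt(pos)); // that field's TypeInformation
            System.out.println(pojoInfo.getArity());     // number of fields
        }
    }
}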
Example #1
Source File: SiddhiExecutionPlanSchemaTest.java From flink-siddhi with Apache License 2.0
@Test
public void testStreamSchemaWithPojo() {
    TypeInformation<Event> typeInfo = TypeExtractor.createTypeInfo(Event.class);
    assertTrue("Type information should be PojoTypeInfo", typeInfo instanceof PojoTypeInfo);

    SiddhiStreamSchema<Event> schema = new SiddhiStreamSchema<>(typeInfo, "id", "timestamp", "name", "price");
    assertEquals(4, schema.getFieldIndexes().length);

    StreamDefinition streamDefinition = schema.getStreamDefinition("test_stream");
    assertArrayEquals(new String[]{"id", "timestamp", "name", "price"}, streamDefinition.getAttributeNameArray());

    assertEquals(Attribute.Type.INT, streamDefinition.getAttributeType("id"));
    assertEquals(Attribute.Type.LONG, streamDefinition.getAttributeType("timestamp"));
    assertEquals(Attribute.Type.STRING, streamDefinition.getAttributeType("name"));
    assertEquals(Attribute.Type.DOUBLE, streamDefinition.getAttributeType("price"));

    assertEquals("define stream test_stream (id int,timestamp long,name string,price double);",
        schema.getStreamDefinitionExpression("test_stream"));
}
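The Event class is defined elsewhere in the flink-siddhi test sources. Going by the asserted attribute types, a minimal sketch of it might look like this (a reconstruction for illustration, not the original definition):

// Hypothetical reconstruction of the Event POJO used in the test above.
// Flink treats it as a POJO: public class, public no-arg constructor,
// publicly reachable fields.
public class Event {
    public int id;
    public long timestamp;
    public String name;
    public double price;

    public Event() {}
}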
Example #2
Source File: CassandraSink.java From Flink-CEPplus with Apache License 2.0
/**
 * Writes a DataStream into a Cassandra database.
 *
 * @param input input DataStream
 * @param <IN> input type
 * @return CassandraSinkBuilder, to further configure the sink
 */
public static <IN> CassandraSinkBuilder<IN> addSink(DataStream<IN> input) {
    TypeInformation<IN> typeInfo = input.getType();
    if (typeInfo instanceof TupleTypeInfo) {
        DataStream<Tuple> tupleInput = (DataStream<Tuple>) input;
        return (CassandraSinkBuilder<IN>) new CassandraTupleSinkBuilder<>(
            tupleInput,
            tupleInput.getType(),
            tupleInput.getType().createSerializer(tupleInput.getExecutionEnvironment().getConfig()));
    }
    if (typeInfo instanceof RowTypeInfo) {
        DataStream<Row> rowInput = (DataStream<Row>) input;
        return (CassandraSinkBuilder<IN>) new CassandraRowSinkBuilder(
            rowInput,
            rowInput.getType(),
            rowInput.getType().createSerializer(rowInput.getExecutionEnvironment().getConfig()));
    }
    if (typeInfo instanceof PojoTypeInfo) {
        return new CassandraPojoSinkBuilder<>(
            input,
            input.getType(),
            input.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
    }
    if (typeInfo instanceof CaseClassTypeInfo) {
        DataStream<Product> productInput = (DataStream<Product>) input;
        return (CassandraSinkBuilder<IN>) new CassandraScalaProductSinkBuilder<>(
            productInput,
            productInput.getType(),
            productInput.getType().createSerializer(input.getExecutionEnvironment().getConfig()));
    }
    throw new IllegalArgumentException("No support for the type of the given DataStream: " + input.getType());
}
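Since the method dispatches on the stream's TypeInformation, a stream of POJOs automatically gets the CassandraPojoSinkBuilder. A minimal caller might look like the sketch below; the SensorReading POJO, the source, and the host are illustrative assumptions, not part of the original source:

// Hypothetical usage: sinking a POJO stream to Cassandra.
// SensorReading and SensorSource are assumed user classes; host is illustrative.
DataStream<SensorReading> readings = env.addSource(new SensorSource());
CassandraSink.addSink(readings)   // PojoTypeInfo -> CassandraPojoSinkBuilder
    .setHost("127.0.0.1")
    .build();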
Example #3
Source File: CsvInputFormatTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testPojoTypeWithMappingInformation() throws Exception {
    File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);

    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write("123,3.123,AAA,BBB\n");
    wrt.write("456,1.123,BBB,AAA\n");
    wrt.close();

    @SuppressWarnings("unchecked")
    PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
    CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(
        new Path(tempFile.toURI().toString()), typeInfo,
        new String[]{"field1", "field3", "field2", "field4"});

    inputFormat.configure(new Configuration());
    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);

    validatePojoItem(inputFormat);
}
Example #4
Source File: ParquetPojoInputFormat.java From flink with Apache License 2.0
/**
 * Extracts the {@link TypeInformation}s from {@link PojoTypeInfo} according to the given field names.
 */
private static <E> TypeInformation<?>[] extractTypeInfos(PojoTypeInfo<E> pojoTypeInfo, String[] fieldNames) {
    Preconditions.checkNotNull(pojoTypeInfo);
    Preconditions.checkNotNull(fieldNames);
    Preconditions.checkArgument(pojoTypeInfo.getArity() >= fieldNames.length);
    TypeInformation<?>[] fieldTypes = new TypeInformation<?>[fieldNames.length];
    for (int i = 0; i < fieldNames.length; ++i) {
        String fieldName = fieldNames[i];
        Preconditions.checkNotNull(fieldName, "The field can't be null");
        int fieldPos = pojoTypeInfo.getFieldIndex(fieldName);
        Preconditions.checkArgument(fieldPos >= 0,
            String.format("Field %s is not a member of POJO type %s",
                fieldName, pojoTypeInfo.getTypeClass().getName()));
        fieldTypes[i] = pojoTypeInfo.getTypeAt(fieldPos);
    }
    return fieldTypes;
}
Example #5
Source File: CsvInputFormatTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testPojoType() throws Exception {
    File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);

    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write("123,AAA,3.123,BBB\n");
    wrt.write("456,BBB,1.123,AAA\n");
    wrt.close();

    @SuppressWarnings("unchecked")
    PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
    CsvInputFormat<PojoItem> inputFormat =
        new PojoCsvInputFormat<PojoItem>(new Path(tempFile.toURI().toString()), typeInfo);

    inputFormat.configure(new Configuration());
    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);

    validatePojoItem(inputFormat);
}
Example #6
Source File: AvroInputFormatTypeExtractionTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testTypeExtraction() {
    try {
        InputFormat<MyAvroType, ?> format = new AvroInputFormat<MyAvroType>(
            new Path("file:///ignore/this/file"), MyAvroType.class);

        TypeInformation<?> typeInfoDirect = TypeExtractor.getInputFormatTypes(format);

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<MyAvroType> input = env.createInput(format);
        TypeInformation<?> typeInfoDataSet = input.getType();

        Assert.assertTrue(typeInfoDirect instanceof PojoTypeInfo);
        Assert.assertTrue(typeInfoDataSet instanceof PojoTypeInfo);

        Assert.assertEquals(MyAvroType.class, typeInfoDirect.getTypeClass());
        Assert.assertEquals(MyAvroType.class, typeInfoDataSet.getTypeClass());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #7
Source File: CsvInputFormatTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testPojoTypeWithMappingInfoAndPartialField() throws Exception {
    File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);

    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write("123,3.123,AAA,BBB\n");
    wrt.write("456,1.123,BBB,AAA\n");
    wrt.close();

    @SuppressWarnings("unchecked")
    PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
    CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(
        new Path(tempFile.toURI().toString()), typeInfo,
        new String[]{"field1", "field4"}, new boolean[]{true, false, false, true});

    inputFormat.configure(new Configuration());
    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);

    PojoItem item = new PojoItem();
    inputFormat.nextRecord(item);

    assertEquals(123, item.field1);
    assertEquals("BBB", item.field4);
}
Example #8
Source File: FieldAccessorTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testPojoInPojo() {
    Outer o = new Outer(10, new Inner(4L), (short) 12);
    PojoTypeInfo<Outer> tpeInfo = (PojoTypeInfo<Outer>) TypeInformation.of(Outer.class);

    FieldAccessor<Outer, Long> fix = FieldAccessorFactory.getAccessor(tpeInfo, "i.x", null);
    assertEquals(4L, (long) fix.get(o));
    assertEquals(4L, o.i.x);
    o = fix.set(o, 22L);
    assertEquals(22L, (long) fix.get(o));
    assertEquals(22L, o.i.x);

    FieldAccessor<Outer, Inner> fi = FieldAccessorFactory.getAccessor(tpeInfo, "i", null);
    assertEquals(22L, fi.get(o).x);
    assertEquals(22L, (long) fix.get(o));
    assertEquals(22L, o.i.x);
    o = fi.set(o, new Inner(30L));
    assertEquals(30L, fi.get(o).x);
    assertEquals(30L, (long) fix.get(o));
    assertEquals(30L, o.i.x);
}
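The nested Outer and Inner POJOs are declared in the test class. Going by the constructor calls and the o.i.x accesses above, a minimal sketch might look like this (the field names a and b are guesses; only i and x are confirmed by the test):

// Hypothetical reconstruction of the nested POJOs used in the test above.
public class Inner {
    public long x;
    public Inner() {}
    public Inner(long x) { this.x = x; }
}

public class Outer {
    public int a;     // assumed name for the int field
    public Inner i;
    public short b;   // assumed name for the short field
    public Outer() {}
    public Outer(int a, Inner i, short b) {
        this.a = a;
        this.i = i;
        this.b = b;
    }
}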
Example #9
Source File: FieldInfoUtils.java From flink with Apache License 2.0
private static <A> List<FieldInfo> extractFieldInformation(
        TypeInformation<A> inputType,
        Expression[] exprs) {
    final List<FieldInfo> fieldInfos;
    if (inputType instanceof GenericTypeInfo && inputType.getTypeClass() == Row.class) {
        throw new ValidationException(
            "An input of GenericTypeInfo<Row> cannot be converted to Table. " +
            "Please specify the type of the input with a RowTypeInfo.");
    } else if (inputType instanceof TupleTypeInfoBase) {
        fieldInfos = extractFieldInfosFromTupleType((TupleTypeInfoBase<?>) inputType, exprs);
    } else if (inputType instanceof PojoTypeInfo) {
        fieldInfos = extractFieldInfosByNameReference((CompositeType<?>) inputType, exprs);
    } else {
        fieldInfos = extractFieldInfoFromAtomicType(inputType, exprs);
    }

    return fieldInfos;
}
Example #10
Source File: ParquetPojoInputFormatTest.java From flink with Apache License 2.0
@Test
public void testReadPojoFromSimpleRecord() throws IOException {
    Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> simple = TestUtil.getSimpleRecordTestData();
    MessageType messageType = SCHEMA_CONVERTER.convert(TestUtil.SIMPLE_SCHEMA);
    Path path = TestUtil.createTempParquetFile(
        tempRoot.getRoot(), TestUtil.SIMPLE_SCHEMA, Collections.singletonList(simple.f1));

    ParquetPojoInputFormat<PojoSimpleRecord> inputFormat = new ParquetPojoInputFormat<>(
        path, messageType, (PojoTypeInfo<PojoSimpleRecord>) Types.POJO(PojoSimpleRecord.class));
    inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext());

    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    assertEquals(1, splits.length);
    inputFormat.open(splits[0]);

    PojoSimpleRecord simpleRecord = inputFormat.nextRecord(null);
    assertEquals(simple.f2.getField(0), simpleRecord.getFoo());
    assertEquals(simple.f2.getField(1), simpleRecord.getBar());
    assertArrayEquals((Long[]) simple.f2.getField(2), simpleRecord.getArr());
}
Example #11
Source File: ParquetPojoInputFormatTest.java From flink with Apache License 2.0
@Test
public void testProjectedReadPojoFromSimpleRecord() throws IOException, NoSuchFieldError {
    Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> simple = TestUtil.getSimpleRecordTestData();
    MessageType messageType = SCHEMA_CONVERTER.convert(TestUtil.SIMPLE_SCHEMA);
    Path path = TestUtil.createTempParquetFile(
        tempRoot.getRoot(), TestUtil.SIMPLE_SCHEMA, Collections.singletonList(simple.f1));

    ParquetPojoInputFormat<PojoSimpleRecord> inputFormat = new ParquetPojoInputFormat<>(
        path, messageType, (PojoTypeInfo<PojoSimpleRecord>) Types.POJO(PojoSimpleRecord.class));
    inputFormat.setRuntimeContext(TestUtil.getMockRuntimeContext());

    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    assertEquals(1, splits.length);

    inputFormat.selectFields(new String[]{"foo"});
    inputFormat.open(splits[0]);

    PojoSimpleRecord simpleRecord = inputFormat.nextRecord(null);
    assertEquals(simple.f2.getField(0), simpleRecord.getFoo());
    assertEquals("", simpleRecord.getBar());
    assertArrayEquals(new Long[0], simpleRecord.getArr());
}
Example #12
Source File: StreamOutputHandler.java From bahir-flink with Apache License 2.0
@Override
public void receive(Event[] events) {
    StreamRecord<R> reusableRecord = new StreamRecord<>(null, 0L);
    for (Event event : events) {
        if (typeInfo == null || Map.class.isAssignableFrom(typeInfo.getTypeClass())) {
            reusableRecord.replace(toMap(event), event.getTimestamp());
            output.collect(reusableRecord);
        } else if (typeInfo.isTupleType()) {
            Tuple tuple = this.toTuple(event);
            reusableRecord.replace(tuple, event.getTimestamp());
            output.collect(reusableRecord);
        } else if (typeInfo instanceof PojoTypeInfo) {
            R obj;
            try {
                obj = objectMapper.convertValue(toMap(event), typeInfo.getTypeClass());
            } catch (IllegalArgumentException ex) {
                LOGGER.error("Failed to map event: " + event + " into type: " + typeInfo, ex);
                throw ex;
            }
            reusableRecord.replace(obj, event.getTimestamp());
            output.collect(reusableRecord);
        } else {
            throw new IllegalArgumentException("Unable to format " + event + " as type " + typeInfo);
        }
    }
}
Example #13
Source File: CsvInputFormatTest.java From flink with Apache License 2.0
@Test
public void testPojoTypeWithPartialFieldInCSV() throws Exception {
    File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);

    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write("123,NODATA,AAA,NODATA,3.123,BBB\n");
    wrt.write("456,NODATA,BBB,NODATA,1.123,AAA\n");
    wrt.close();

    @SuppressWarnings("unchecked")
    PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
    CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(
        new Path(tempFile.toURI().toString()), typeInfo,
        new boolean[]{true, false, true, false, true, true});

    inputFormat.configure(new Configuration());
    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);

    validatePojoItem(inputFormat);
}
Example #14
Source File: PojoCsvInputFormat.java From flink with Apache License 2.0
public PojoCsvInputFormat(
        Path filePath,
        String lineDelimiter,
        String fieldDelimiter,
        PojoTypeInfo<OUT> pojoTypeInfo,
        String[] fieldNames,
        int[] includedFieldsMask) {
    super(filePath);
    boolean[] mask = (includedFieldsMask == null)
        ? createDefaultMask(fieldNames.length)
        : toBooleanMask(includedFieldsMask);
    configure(lineDelimiter, fieldDelimiter, pojoTypeInfo, fieldNames, mask);
}
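A call to this constructor might look like the following sketch; the file path, delimiters, field names, and column indexes are illustrative assumptions, not taken from the original source:

// Hypothetical construction of a PojoCsvInputFormat with explicit delimiters
// and an included-fields mask; all concrete values here are illustrative.
@SuppressWarnings("unchecked")
PojoTypeInfo<PojoItem> typeInfo =
    (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
PojoCsvInputFormat<PojoItem> format = new PojoCsvInputFormat<>(
    new Path("file:///tmp/items.csv"),  // input file (assumed)
    "\n",                               // line delimiter
    ",",                                // field delimiter
    typeInfo,
    new String[]{"field1", "field2"},   // POJO fields to populate
    new int[]{0, 2});                   // CSV columns to read, by index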
Example #15
Source File: CsvInputFormatTest.java From flink with Apache License 2.0
@Test
public void testPojoTypeWithPrivateField() throws Exception {
    File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);

    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write("123,AAA,3.123,BBB\n");
    wrt.write("456,BBB,1.123,AAA\n");
    wrt.close();

    @SuppressWarnings("unchecked")
    PojoTypeInfo<PrivatePojoItem> typeInfo =
        (PojoTypeInfo<PrivatePojoItem>) TypeExtractor.createTypeInfo(PrivatePojoItem.class);
    CsvInputFormat<PrivatePojoItem> inputFormat =
        new PojoCsvInputFormat<PrivatePojoItem>(new Path(tempFile.toURI().toString()), typeInfo);
    inputFormat.configure(new Configuration());

    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);

    PrivatePojoItem item = new PrivatePojoItem();
    inputFormat.nextRecord(item);

    assertEquals(123, item.field1);
    assertEquals("AAA", item.field2);
    assertEquals(Double.valueOf(3.123), item.field3);
    assertEquals("BBB", item.field4);

    inputFormat.nextRecord(item);

    assertEquals(456, item.field1);
    assertEquals("BBB", item.field2);
    assertEquals(Double.valueOf(1.123), item.field3);
    assertEquals("AAA", item.field4);
}
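For a class with non-public fields to qualify as a Flink POJO, each such field needs a public getter and setter. A minimal sketch of what PrivatePojoItem might look like follows; it is a reconstruction, with only the field names and types implied by the assertions above (in the original it is presumably a nested class of the test, which is why the test can read the private fields directly):

// Hypothetical POJO with non-public fields. Flink still classifies it as a
// POJO because every field is reachable through a public getter/setter pair.
public class PrivatePojoItem {
    private int field1;
    private String field2;
    private Double field3;
    private String field4;

    public PrivatePojoItem() {}

    public int getField1() { return field1; }
    public void setField1(int field1) { this.field1 = field1; }
    public String getField2() { return field2; }
    public void setField2(String field2) { this.field2 = field2; }
    public Double getField3() { return field3; }
    public void setField3(Double field3) { this.field3 = field3; }
    public String getField4() { return field4; }
    public void setField4(String field4) { this.field4 = field4; }
}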
Example #16
Source File: CsvInputFormatTest.java From flink with Apache License 2.0
/**
 * Tests that the CSV input format can deal with POJOs which are subclasses.
 *
 * @throws Exception
 */
@Test
public void testPojoSubclassType() throws Exception {
    final String fileContent = "t1,foobar,tweet2\nt2,barfoo,tweet2";

    final File tempFile = File.createTempFile("CsvReaderPOJOSubclass", "tmp");
    tempFile.deleteOnExit();

    OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tempFile));
    writer.write(fileContent);
    writer.close();

    @SuppressWarnings("unchecked")
    PojoTypeInfo<TwitterPOJO> typeInfo =
        (PojoTypeInfo<TwitterPOJO>) TypeExtractor.createTypeInfo(TwitterPOJO.class);
    CsvInputFormat<TwitterPOJO> inputFormat =
        new PojoCsvInputFormat<>(new Path(tempFile.toURI().toString()), typeInfo);

    inputFormat.configure(new Configuration());
    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);

    List<TwitterPOJO> expected = new ArrayList<>();
    for (String line : fileContent.split("\n")) {
        String[] elements = line.split(",");
        expected.add(new TwitterPOJO(elements[0], elements[1], elements[2]));
    }

    List<TwitterPOJO> actual = new ArrayList<>();
    TwitterPOJO pojo;
    while ((pojo = inputFormat.nextRecord(new TwitterPOJO())) != null) {
        actual.add(pojo);
    }

    assertEquals(expected, actual);
}
Example #17
Source File: ExecutionEnvironment.java From flink with Apache License 2.0
/**
 * Registers the given type with the serialization stack. If the type is eventually
 * serialized as a POJO, then the type is registered with the POJO serializer. If the
 * type ends up being serialized with Kryo, then it will be registered at Kryo to make
 * sure that only tags are written.
 *
 * @param type The class of the type to register.
 */
public void registerType(Class<?> type) {
    if (type == null) {
        throw new NullPointerException("Cannot register null type class.");
    }

    TypeInformation<?> typeInfo = TypeExtractor.createTypeInfo(type);

    if (typeInfo instanceof PojoTypeInfo) {
        config.registerPojoType(type);
    } else {
        config.registerKryoType(type);
    }
}
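In user code this is typically called once on the environment before the job is built. A minimal sketch (MyPojo is an assumed user class, not from the original source):

// Hypothetical usage: registering a type up front so its serializer is
// resolved eagerly. MyPojo is an assumed user class.
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
env.registerType(MyPojo.class);  // PojoTypeInfo -> registered with the POJO serializer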