Java Code Examples for org.apache.flink.api.common.io.OutputFormat#configure()
The following examples show how to use org.apache.flink.api.common.io.OutputFormat#configure(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
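Before the examples, the sketch below illustrates the lifecycle that configure() takes part in: Flink instantiates an OutputFormat generically, always calls configure(Configuration) first on the new instance, then open(int taskNumber, int numTasks) per parallel task, writeRecord() per record, and finally close(). This is a minimal sketch, assuming the OutputFormat interface of the Flink versions used on this page; PrintingOutputFormat and the print.prefix key are hypothetical names used only for illustration.

import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.configuration.Configuration;

import java.io.IOException;

// Hypothetical OutputFormat, used only to illustrate the
// configure() -> open() -> writeRecord() -> close() lifecycle.
public class PrintingOutputFormat<T> implements OutputFormat<T> {

    private String prefix = "";

    @Override
    public void configure(Configuration parameters) {
        // configure() is always called first on a newly instantiated format,
        // before open(); read format-specific settings from the Configuration here.
        // "print.prefix" is a made-up key for this sketch.
        this.prefix = parameters.getString("print.prefix", "");
    }

    @Override
    public void open(int taskNumber, int numTasks) throws IOException {
        // acquire per-task resources (connections, file handles, ...) here
    }

    @Override
    public void writeRecord(T record) throws IOException {
        System.out.println(prefix + record);
    }

    @Override
    public void close() throws IOException {
        // release resources acquired in open()
    }

    public static void main(String[] args) throws IOException {
        Configuration parameters = new Configuration();
        parameters.setString("print.prefix", "> ");

        OutputFormat<String> format = new PrintingOutputFormat<>();
        format.configure(parameters); // must happen before open()
        format.open(0, 1);            // task 0 of 1
        format.writeRecord("hello");
        format.close();
    }
}

The same configure() / open() / writeRecord() / close() sequence appears in every example below, whether the format is driven by a test, by the runtime, or by a partition writer.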
Example 1
Source File: CassandraConnectorITCase.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Test
public void testCassandraBatchRowFormat() throws Exception {
    OutputFormat<Row> sink = new CassandraRowOutputFormat(injectTableName(INSERT_DATA_QUERY), builder);
    try {
        sink.configure(new Configuration());
        sink.open(0, 1);

        for (Row value : rowCollection) {
            sink.writeRecord(value);
        }
    } finally {
        sink.close();
    }

    ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
    List<com.datastax.driver.core.Row> rows = rs.all();
    Assert.assertEquals(rowCollection.size(), rows.size());
}
Example 2
Source File: CassandraConnectorITCase.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testCassandraBatchRowFormat() throws Exception {
    OutputFormat<Row> sink = new CassandraRowOutputFormat(injectTableName(INSERT_DATA_QUERY), builder);
    try {
        sink.configure(new Configuration());
        sink.open(0, 1);

        for (Row value : rowCollection) {
            sink.writeRecord(value);
        }
    } finally {
        sink.close();
    }

    ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
    List<com.datastax.driver.core.Row> rows = rs.all();
    Assert.assertEquals(rowCollection.size(), rows.size());
}
Example 3
Source File: InputOutputFormatVertex.java From flink with Apache License 2.0 | 5 votes |
@Override
public void finalizeOnMaster(ClassLoader loader) throws Exception {
    final InputOutputFormatContainer formatContainer = initInputOutputformatContainer(loader);

    final ClassLoader original = Thread.currentThread().getContextClassLoader();
    try {
        // set user classloader before calling user code
        Thread.currentThread().setContextClassLoader(loader);

        // configure output formats and invoke finalizeGlobal()
        Map<OperatorID, UserCodeWrapper<? extends OutputFormat<?>>> outputFormats = formatContainer.getOutputFormats();
        for (Map.Entry<OperatorID, UserCodeWrapper<? extends OutputFormat<?>>> entry : outputFormats.entrySet()) {
            final OutputFormat<?> outputFormat;

            try {
                outputFormat = entry.getValue().getUserCodeObject();
                outputFormat.configure(formatContainer.getParameters(entry.getKey()));
            } catch (Throwable t) {
                throw new Exception("Configuring the output format ("
                    + getFormatDescription(entry.getKey()) + ") failed: " + t.getMessage(), t);
            }

            if (outputFormat instanceof FinalizeOnMaster) {
                ((FinalizeOnMaster) outputFormat).finalizeGlobal(getParallelism());
            }
        }
    } finally {
        // restore original classloader
        Thread.currentThread().setContextClassLoader(original);
    }
}
Example 4
Source File: PartitionWriter.java From flink with Apache License 2.0 | 5 votes |
/**
 * Create a new output format with path, configure it and open it.
 */
OutputFormat<T> createNewOutputFormat(Path path) throws IOException {
    OutputFormat<T> format = factory.createOutputFormat(path);
    format.configure(conf);
    // Here we just think of it as a single file format, so there can only be a single task.
    format.open(0, 1);
    return format;
}
Example 5
Source File: CassandraConnectorITCase.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@Test
public void testCassandraBatchPojoFormat() throws Exception {

    session.execute(CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));

    OutputFormat<CustomCassandraAnnotatedPojo> sink = new CassandraPojoOutputFormat<>(
        builder,
        CustomCassandraAnnotatedPojo.class,
        () -> new Mapper.Option[]{Mapper.Option.saveNullFields(true)});

    List<CustomCassandraAnnotatedPojo> customCassandraAnnotatedPojos = IntStream.range(0, 20)
        .mapToObj(x -> new CustomCassandraAnnotatedPojo(UUID.randomUUID().toString(), x, 0))
        .collect(Collectors.toList());
    try {
        sink.configure(new Configuration());
        sink.open(0, 1);

        for (CustomCassandraAnnotatedPojo customCassandraAnnotatedPojo : customCassandraAnnotatedPojos) {
            sink.writeRecord(customCassandraAnnotatedPojo);
        }
    } finally {
        sink.close();
    }

    ResultSet rs = session.execute(SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));
    Assert.assertEquals(20, rs.all().size());

    InputFormat<CustomCassandraAnnotatedPojo, InputSplit> source = new CassandraPojoInputFormat<>(
        SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, "batches"),
        builder,
        CustomCassandraAnnotatedPojo.class);
    List<CustomCassandraAnnotatedPojo> result = new ArrayList<>();

    try {
        source.configure(new Configuration());
        source.open(null);

        while (!source.reachedEnd()) {
            CustomCassandraAnnotatedPojo temp = source.nextRecord(null);
            result.add(temp);
        }
    } finally {
        source.close();
    }

    Assert.assertEquals(20, result.size());
    result.sort(Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));
    customCassandraAnnotatedPojos.sort(Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));

    assertThat(result, samePropertyValuesAs(customCassandraAnnotatedPojos));
}
Example 6
Source File: GenericDataSinkBase.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@SuppressWarnings("unchecked") protected void executeOnCollections(List<IN> inputData, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception { OutputFormat<IN> format = this.formatWrapper.getUserCodeObject(); TypeInformation<IN> inputType = getInput().getOperatorInfo().getOutputType(); if (this.localOrdering != null) { int[] sortColumns = this.localOrdering.getFieldPositions(); boolean[] sortOrderings = this.localOrdering.getFieldSortDirections(); final TypeComparator<IN> sortComparator; if (inputType instanceof CompositeType) { sortComparator = ((CompositeType<IN>) inputType).createComparator(sortColumns, sortOrderings, 0, executionConfig); } else if (inputType instanceof AtomicType) { sortComparator = ((AtomicType<IN>) inputType).createComparator(sortOrderings[0], executionConfig); } else { throw new UnsupportedOperationException("Local output sorting does not support type "+inputType+" yet."); } Collections.sort(inputData, new Comparator<IN>() { @Override public int compare(IN o1, IN o2) { return sortComparator.compare(o1, o2); } }); } if(format instanceof InitializeOnMaster) { ((InitializeOnMaster)format).initializeGlobal(1); } format.configure(this.parameters); if(format instanceof RichOutputFormat){ ((RichOutputFormat<?>) format).setRuntimeContext(ctx); } format.open(0, 1); for (IN element : inputData) { format.writeRecord(element); } format.close(); if(format instanceof FinalizeOnMaster) { ((FinalizeOnMaster)format).finalizeGlobal(1); } }
Example 7
Source File: CassandraConnectorITCase.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testCassandraBatchPojoFormat() throws Exception {

    session.execute(CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));

    OutputFormat<CustomCassandraAnnotatedPojo> sink = new CassandraPojoOutputFormat<>(
        builder,
        CustomCassandraAnnotatedPojo.class,
        () -> new Mapper.Option[]{Mapper.Option.saveNullFields(true)});

    List<CustomCassandraAnnotatedPojo> customCassandraAnnotatedPojos = IntStream.range(0, 20)
        .mapToObj(x -> new CustomCassandraAnnotatedPojo(UUID.randomUUID().toString(), x, 0))
        .collect(Collectors.toList());
    try {
        sink.configure(new Configuration());
        sink.open(0, 1);

        for (CustomCassandraAnnotatedPojo customCassandraAnnotatedPojo : customCassandraAnnotatedPojos) {
            sink.writeRecord(customCassandraAnnotatedPojo);
        }
    } finally {
        sink.close();
    }

    ResultSet rs = session.execute(SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));
    Assert.assertEquals(20, rs.all().size());

    InputFormat<CustomCassandraAnnotatedPojo, InputSplit> source = new CassandraPojoInputFormat<>(
        SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, "batches"),
        builder,
        CustomCassandraAnnotatedPojo.class);
    List<CustomCassandraAnnotatedPojo> result = new ArrayList<>();

    try {
        source.configure(new Configuration());
        source.open(null);

        while (!source.reachedEnd()) {
            CustomCassandraAnnotatedPojo temp = source.nextRecord(null);
            result.add(temp);
        }
    } finally {
        source.close();
    }

    Assert.assertEquals(20, result.size());
    result.sort(Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));
    customCassandraAnnotatedPojos.sort(Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));

    assertThat(result, samePropertyValuesAs(customCassandraAnnotatedPojos));
}
Example 8
Source File: GenericDataSinkBase.java From flink with Apache License 2.0 | 4 votes |
@SuppressWarnings("unchecked") protected void executeOnCollections(List<IN> inputData, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception { OutputFormat<IN> format = this.formatWrapper.getUserCodeObject(); TypeInformation<IN> inputType = getInput().getOperatorInfo().getOutputType(); if (this.localOrdering != null) { int[] sortColumns = this.localOrdering.getFieldPositions(); boolean[] sortOrderings = this.localOrdering.getFieldSortDirections(); final TypeComparator<IN> sortComparator; if (inputType instanceof CompositeType) { sortComparator = ((CompositeType<IN>) inputType).createComparator(sortColumns, sortOrderings, 0, executionConfig); } else if (inputType instanceof AtomicType) { sortComparator = ((AtomicType<IN>) inputType).createComparator(sortOrderings[0], executionConfig); } else { throw new UnsupportedOperationException("Local output sorting does not support type "+inputType+" yet."); } Collections.sort(inputData, new Comparator<IN>() { @Override public int compare(IN o1, IN o2) { return sortComparator.compare(o1, o2); } }); } if(format instanceof InitializeOnMaster) { ((InitializeOnMaster)format).initializeGlobal(1); } format.configure(this.parameters); if(format instanceof RichOutputFormat){ ((RichOutputFormat<?>) format).setRuntimeContext(ctx); } format.open(0, 1); for (IN element : inputData) { format.writeRecord(element); } format.close(); if(format instanceof FinalizeOnMaster) { ((FinalizeOnMaster)format).finalizeGlobal(1); } }