org.apache.flink.table.api.java.StreamTableEnvironment Java Examples
The following examples show how to use
org.apache.flink.table.api.java.StreamTableEnvironment.
The source project, file, and license are noted above each example.
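Most of the examples below share one pattern: obtain a StreamExecutionEnvironment, wrap it in a StreamTableEnvironment, register a DataStream as a table, run a SQL query, and convert the result back to a DataStream. A minimal sketch of that pattern (the class name, field name, and query are illustrative, not taken from any project below):

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class MinimalStreamTableExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Any DataStream works; a two-element stream keeps the sketch self-contained.
        DataStream<String> words = env.fromElements("hello", "world");

        // Name the single field "word", register the table, and query it.
        Table table = tableEnv.fromDataStream(words, "word");
        tableEnv.registerTable("Words", table);
        Table result = tableEnv.sqlQuery("SELECT word FROM Words");

        // Append-only results can go straight back to a DataStream.
        tableEnv.toAppendStream(result, Row.class).print();
        env.execute("minimal StreamTableEnvironment example");
    }
}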
Example #1
Source File: Sort.java From flink-training-exercises with Apache License 2.0

public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);

    DataStream<Event> eventStream = env.addSource(new OutOfOrderEventSource())
            .assignTimestampsAndWatermarks(new TimestampsAndWatermarks());

    Table events = tableEnv.fromDataStream(eventStream, "eventTime.rowtime");
    tableEnv.registerTable("events", events);
    Table sorted = tableEnv.sqlQuery("SELECT eventTime FROM events ORDER BY eventTime ASC");
    DataStream<Row> sortedEventStream = tableEnv.toAppendStream(sorted, Row.class);
    sortedEventStream.print();
    env.execute();
}
Example #2
Source File: ExecutionContextTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testTemporalTables() throws Exception {
    final ExecutionContext<?> context = createStreamingExecutionContext();

    assertEquals(
        new HashSet<>(Arrays.asList("EnrichmentSource", "HistorySource")),
        context.getTableSources().keySet());

    final StreamTableEnvironment tableEnv =
        (StreamTableEnvironment) context.createEnvironmentInstance().getTableEnvironment();

    assertArrayEquals(
        new String[]{"EnrichmentSource", "HistorySource", "HistoryView", "TemporalTableUsage"},
        tableEnv.listTables());

    assertArrayEquals(
        new String[]{"SourceTemporalTable", "ViewTemporalTable"},
        tableEnv.listUserDefinedFunctions());

    assertArrayEquals(
        new String[]{"integerField", "stringField", "rowtimeField", "integerField0", "stringField0", "rowtimeField0"},
        tableEnv.scan("TemporalTableUsage").getSchema().getFieldNames());
}
Example #3
Source File: JoinTest.java From sylph with Apache License 2.0

@Before
public void init() {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    execEnv.setParallelism(4);
    execEnv.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);
    tableEnv = (StreamTableEnvironmentImpl) StreamTableEnvironment.create(execEnv);
    tableEnv.registerFunction("from_unixtime", new TimeUtil.FromUnixTime());

    // --- create stream source
    TypeInformation[] fieldTypes = {Types.STRING(), Types.STRING(), Types.LONG()};
    String[] fieldNames = {"topic", "user_id", "time"};
    RowTypeInfo rowTypeInfo = new RowTypeInfo(fieldTypes, fieldNames);
    DataStream<Row> dataSource = execEnv.fromCollection(new ArrayList<>(), rowTypeInfo);

    tableEnv.registerTableSource("tb1", new SylphTableSource(rowTypeInfo, dataSource));
    tableEnv.registerTableSource("tb0", new SylphTableSource(rowTypeInfo, dataSource));

    final AntlrSqlParser sqlParser = new AntlrSqlParser();
    this.dimTable = (CreateTable) sqlParser.createStatement(
        "create batch table users(id string, name string, city string) with(type = '" +
            JoinOperator.class.getName() + "')");
}
Example #4
Source File: CustomKafkaSourceMain.java From flink-learning with Apache License 2.0

public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    blinkStreamTableEnv.registerTableSource("kafkaDataStream", new MyKafkaTableSource(ExecutionEnvUtil.PARAMETER_TOOL));

    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(
            new String[]{"_count", "word"},
            new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");
    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Custom Kafka Table Source");
}
Example #5
Source File: JavaSqlITCase.java From Flink-CEPplus with Apache License 2.0

@Test
public void testSelect() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    DataStream<Tuple3<Integer, Long, String>> ds = JavaStreamTestData.getSmall3TupleDataSet(env);
    Table in = tableEnv.fromDataStream(ds, "a,b,c");
    tableEnv.registerTable("MyTable", in);

    String sqlQuery = "SELECT * FROM MyTable";
    Table result = tableEnv.sqlQuery(sqlQuery);

    DataStream<Row> resultSet = tableEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,1,Hi");
    expected.add("2,2,Hello");
    expected.add("3,2,Hello world");

    StreamITCase.compareWithList(expected);
}
Example #6
Source File: CatalogTypes.java From flink-learning with Apache License 2.0

public static void main(String[] args) {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // GenericInMemoryCatalog is the default catalog
    blinkStreamTableEnv.registerCatalog("zhisheng", new GenericInMemoryCatalog("zhisheng"));

    // HiveCatalog requires the Hive connector and Hive dependencies on the classpath
    // blinkStreamTableEnv.registerCatalog("zhisheng", new HiveCatalog("zhisheng", "zhisheng", "~/zhisheng/hive/conf", "2.3.4"));
}
Example #7
Source File: CatalogAPI.java From flink-learning with Apache License 2.0

public static void main(String[] args) {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // Changing the current catalog and database
    blinkStreamTableEnv.useCatalog("zhisheng");
    blinkStreamTableEnv.useDatabase("zhisheng");
    blinkStreamTableEnv.scan("not_the_current_catalog", "not_the_current_db", "zhisheng");

    // List available catalogs/databases/tables
    blinkStreamTableEnv.listCatalogs();
    blinkStreamTableEnv.listDatabases();
    blinkStreamTableEnv.listTables();
}
Example #8
Source File: SQLExampleWordCount.java From flink-learning with Apache License 2.0

public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String path = SQLExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();

    CsvTableSource csvTableSource = CsvTableSource.builder()
            .field("word", Types.STRING)
            .path(path)
            .build();
    blinkStreamTableEnv.registerTableSource("zhisheng", csvTableSource);
    Table wordWithCount = blinkStreamTableEnv.sqlQuery("SELECT count(word), word FROM zhisheng GROUP BY word");
    blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class).print();

    blinkStreamTableEnv.execute("Blink Stream SQL Job");
}
Example #9
Source File: SqlScriptExecutor.java From flink-tutorials with Apache License 2.0

public static void main(String[] args) throws Exception {
    HiveCatalog hiveCatalog = new HiveCatalog(HIVE_CATALOG, HIVE_DATABASE, HIVE_CONF_DIR, HIVE_VERSION);

    StreamTableEnvironment env = createTableEnv();
    env.registerCatalog(HIVE_CATALOG, hiveCatalog);

    File script = new File(args[0]);
    String[] commands = FileUtils.readFileUtf8(script).split(";");
    for (String command : commands) {
        if (command.trim().isEmpty()) {
            continue;
        }

        LOG.info("Executing SQL statement: {}", command.trim());
        env.sqlUpdate(command.trim());
    }

    env.execute("SQL Script: " + script.getName());
}
Example #10
Source File: AbstractFlinkClient.java From alchemy with Apache License 2.0

private void registerFunction(StreamTableEnvironment env, SqlSubmitFlinkRequest request) {
    // load the shared (built-in) functions
    List<String> functionNames = Lists.newArrayList();
    loadFunction(env, functionNames, ServiceLoader.load(BaseFunction.class));
    if (request.getUdfs() == null) {
        return;
    }
    request.getUdfs().forEach(udfDescriptor -> {
        try {
            Object udf = udfDescriptor.transform();
            register(env, udfDescriptor.getName(), udf);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
}
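The helper above loads shared functions via ServiceLoader and then registers user-supplied UDFs one by one. As a minimal sketch of what registering a single scalar UDF looks like against this API (the HashCode class and the name "hash_code" are invented for illustration):

import org.apache.flink.table.functions.ScalarFunction;

// Hypothetical scalar UDF, for illustration only.
public static class HashCode extends ScalarFunction {
    public int eval(String s) {
        return s == null ? 0 : s.hashCode();
    }
}

// Registered on the same StreamTableEnvironment the helper receives:
// env.registerFunction("hash_code", new HashCode());
// ...after which SQL can call it: SELECT hash_code(user_id) FROM some_table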
Example #11
Source File: StreamSQLExample.java From flink-learning with Apache License 2.0

public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

    DataStream<Order> orderA = env.fromCollection(Arrays.asList(
            new Order(1L, "beer", 3),
            new Order(1L, "diaper", 4),
            new Order(3L, "rubber", 2)));

    DataStream<Order> orderB = env.fromCollection(Arrays.asList(
            new Order(2L, "pen", 3),
            new Order(2L, "rubber", 3),
            new Order(4L, "beer", 1)));

    Table tableA = tEnv.fromDataStream(orderA, "user, product, amount");
    tEnv.registerDataStream("OrderB", orderB, "user, product, amount");

    Table result = tEnv.sqlQuery("SELECT * FROM " + tableA + " WHERE amount > 2 UNION ALL " +
            "SELECT * FROM OrderB WHERE amount < 2");

    tEnv.toAppendStream(result, Order.class).print();

    env.execute();
}
Example #12
Source File: ExecutionContextTest.java From flink with Apache License 2.0

@Test
public void testTemporalTables() throws Exception {
    final ExecutionContext<?> context = createStreamingExecutionContext();

    assertEquals(
        new HashSet<>(Arrays.asList("EnrichmentSource", "HistorySource")),
        context.getTableSources().keySet());

    final StreamTableEnvironment tableEnv =
        (StreamTableEnvironment) context.createEnvironmentInstance().getTableEnvironment();

    assertArrayEquals(
        new String[]{"EnrichmentSource", "HistorySource", "HistoryView", "TemporalTableUsage"},
        tableEnv.listTables());

    assertArrayEquals(
        new String[]{"SourceTemporalTable", "ViewTemporalTable"},
        tableEnv.listUserDefinedFunctions());

    assertArrayEquals(
        new String[]{"integerField", "stringField", "rowtimeField", "integerField0", "stringField0", "rowtimeField0"},
        tableEnv.scan("TemporalTableUsage").getSchema().getFieldNames());
}
Example #13
Source File: ExecutionContext.java From flink with Apache License 2.0

private void registerTemporalTable(TemporalTableEntry temporalTableEntry) {
    try {
        final Table table = tableEnv.scan(temporalTableEntry.getHistoryTable());
        final TableFunction<?> function = table.createTemporalTableFunction(
            temporalTableEntry.getTimeAttribute(),
            String.join(",", temporalTableEntry.getPrimaryKeyFields()));
        if (tableEnv instanceof StreamTableEnvironment) {
            StreamTableEnvironment streamTableEnvironment = (StreamTableEnvironment) tableEnv;
            streamTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        } else {
            BatchTableEnvironment batchTableEnvironment = (BatchTableEnvironment) tableEnv;
            batchTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        }
    } catch (Exception e) {
        throw new SqlExecutionException(
            "Invalid temporal table '" + temporalTableEntry.getName() + "' over table '" +
                temporalTableEntry.getHistoryTable() + "'.\nCause: " + e.getMessage());
    }
}
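Once registered, a temporal table function is usually consumed from SQL with a LATERAL TABLE join. A hedged sketch of such a query, where all names (Orders, Rates, o_time, currency, rate, amount) are invented for illustration and only the join shape matters:

// Assumes "Rates" is a temporal table function registered as above, and
// "Orders" is a table with a time attribute named o_time.
Table joined = tableEnv.sqlQuery(
        "SELECT o.amount * r.rate AS converted " +
        "FROM Orders AS o, " +
        "LATERAL TABLE (Rates(o.o_time)) AS r " +
        "WHERE o.currency = r.currency");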
Example #14
Source File: KafkaSourceMain.java From flink-learning with Apache License 2.0

public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
    DataStream<String> dataStream = blinkStreamEnv.addSource(
            new FlinkKafkaConsumer011<>(parameterTool.get("kafka.topic"), new SimpleStringSchema(), properties));

    Table table = blinkStreamTableEnv.fromDataStream(dataStream, "word");
    blinkStreamTableEnv.registerTable("kafkaDataStream", table);

    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(
            new String[]{"_count", "word"},
            new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");
    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Kafka Table Source");
}
Example #15
Source File: JsonPathUdfTest.java From sylph with Apache License 2.0

@Before
public void init() throws JsonProcessingException {
    String json = MAPPER.writeValueAsString(ImmutableMap.of(
            "user_id", "uid_001",
            "ip", "127.0.0.1",
            "store", 12.0,
            "key1", ImmutableMap.of("key2", 123)));

    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.createLocalEnvironment();
    execEnv.setParallelism(2);
    tableEnv = StreamTableEnvironment.create(execEnv);
    tableEnv.registerFunction("get_json_object", new UDFJson());
    table = tableEnv.sqlQuery("select '" + json + "' as message");
}
Example #16
Source File: TableExampleWordCount.java From flink-learning with Apache License 2.0

public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String path = TableExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();
    blinkStreamTableEnv
            .connect(new FileSystem().path(path))
            .withFormat(new OldCsv().field("word", Types.STRING).lineDelimiter("\n"))
            .withSchema(new Schema().field("word", Types.STRING))
            .inAppendMode()
            .registerTableSource("FlieSourceTable");

    Table wordWithCount = blinkStreamTableEnv.scan("FlieSourceTable")
            .groupBy("word")
            .select("word,count(word) as _count");
    blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class).print();
    // The true/false flag in the printed results may look puzzling, since it adds an extra field.
    // The sink deletes before it inserts: false marks the retraction of the previous row,
    // true marks the insertion of the new one.

    blinkStreamTableEnv.execute("Blink Stream SQL Job");
}
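The boolean flag described in the comments is explicit in the converted stream's element type; a small sketch reusing the names from the example above (the filter step is illustrative):

import org.apache.flink.api.java.tuple.Tuple2;

DataStream<Tuple2<Boolean, Row>> retractStream =
        blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class);
// f0 == true  -> this Row is being inserted (a new or updated aggregate)
// f0 == false -> this Row retracts a previously emitted aggregate
retractStream.filter(change -> change.f0).print(); // keep only insertions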
Example #17
Source File: TableSqlTest.java From sylph with Apache License 2.0

@Test
public void selectLocalTimeTest() throws Exception {
    StreamTableEnvironment tableEnv = getTableEnv();
    tableEnv.toAppendStream(tableEnv.sqlQuery("select LOCALTIMESTAMP as `check_time`"), Row.class).print();
    tableEnv.execute("");
}
Example #18
Source File: JavaSqlITCase.java From flink with Apache License 2.0

@Test
public void testUnion() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    DataStream<Tuple3<Integer, Long, String>> ds1 = JavaStreamTestData.getSmall3TupleDataSet(env);
    Table t1 = tableEnv.fromDataStream(ds1, "a,b,c");
    tableEnv.registerTable("T1", t1);

    DataStream<Tuple5<Integer, Long, Integer, String, Long>> ds2 = JavaStreamTestData.get5TupleDataStream(env);
    tableEnv.registerDataStream("T2", ds2, "a, b, d, c, e");

    String sqlQuery = "SELECT * FROM T1 " +
            "UNION ALL " +
            "(SELECT a, b, c FROM T2 WHERE a < 3)";
    Table result = tableEnv.sqlQuery(sqlQuery);

    DataStream<Row> resultSet = tableEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,1,Hi");
    expected.add("2,2,Hello");
    expected.add("3,2,Hello world");
    expected.add("1,1,Hallo");
    expected.add("2,2,Hallo Welt");
    expected.add("2,3,Hallo Welt wie");

    StreamITCase.compareWithList(expected);
}
Example #19
Source File: TestStreamMode.java From sylph with Apache License 2.0

@Before
public void init() {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    execEnv.setParallelism(2);
    execEnv.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);
    tableEnv = StreamTableEnvironment.create(execEnv);
}
Example #20
Source File: TableSqlTest.java From sylph with Apache License 2.0

public static StreamTableEnvironment getTableEnv() {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.createLocalEnvironment();
    execEnv.setParallelism(2);
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(execEnv);
    return tableEnv;
}
Example #21
Source File: JavaSqlITCase.java From flink with Apache License 2.0

@Test
public void testRowRegisterRowWithNames() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    List<Row> data = new ArrayList<>();
    data.add(Row.of(1, 1L, "Hi"));
    data.add(Row.of(2, 2L, "Hello"));
    data.add(Row.of(3, 2L, "Hello world"));

    TypeInformation<?>[] types = {
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.LONG_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO};
    String[] names = {"a", "b", "c"};

    RowTypeInfo typeInfo = new RowTypeInfo(types, names);
    DataStream<Row> ds = env.fromCollection(data).returns(typeInfo);

    Table in = tableEnv.fromDataStream(ds, "a,b,c");
    tableEnv.registerTable("MyTableRow", in);

    String sqlQuery = "SELECT a,c FROM MyTableRow";
    Table result = tableEnv.sqlQuery(sqlQuery);

    DataStream<Row> resultSet = tableEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,Hi");
    expected.add("2,Hello");
    expected.add("3,Hello world");

    StreamITCase.compareWithList(expected);
}
Example #22
Source File: JobCompiler.java From AthenaX with Apache License 2.0

public static CompilationResult compileJob(JobDescriptor job) {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.createLocalEnvironment();
    StreamTableEnvironment env = StreamTableEnvironment.getTableEnvironment(execEnv);
    execEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    CompilationResult res = new CompilationResult();

    try {
        res.jobGraph(new JobCompiler(env, job).getJobGraph());
    } catch (IOException e) {
        res.remoteThrowable(e);
    }

    return res;
}
Example #23
Source File: JDBCUpsertTableSinkITCase.java From flink with Apache License 2.0

@Test
public void testAppend() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();
    env.getConfig().setParallelism(1);
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

    Table t = tEnv.fromDataStream(get3TupleDataStream(env), "id, num, text");

    tEnv.registerTable("T", t);

    String[] fields = {"id", "num"};
    tEnv.registerTableSink("upsertSink", JDBCUpsertTableSink.builder()
            .setOptions(JDBCOptions.builder()
                    .setDBUrl(DB_URL)
                    .setTableName(OUTPUT_TABLE2)
                    .build())
            .setTableSchema(TableSchema.builder().fields(
                    fields, new DataType[] {INT(), BIGINT()}).build())
            .build());

    tEnv.sqlUpdate("INSERT INTO upsertSink SELECT id, num FROM T WHERE id IN (2, 10, 20)");
    env.execute();
    check(new Row[] {
            Row.of(2, 2),
            Row.of(10, 4),
            Row.of(20, 6)
    }, DB_URL, OUTPUT_TABLE2, fields);
}
Example #24
Source File: JDBCUpsertTableSinkITCase.java From flink with Apache License 2.0

@Test
public void testUpsert() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

    Table t = tEnv.fromDataStream(get3TupleDataStream(env).assignTimestampsAndWatermarks(
            new AscendingTimestampExtractor<Tuple3<Integer, Long, String>>() {
                @Override
                public long extractAscendingTimestamp(Tuple3<Integer, Long, String> element) {
                    return element.f0;
                }
            }), "id, num, text");

    tEnv.registerTable("T", t);

    String[] fields = {"cnt", "lencnt", "cTag"};
    tEnv.registerTableSink("upsertSink", JDBCUpsertTableSink.builder()
            .setOptions(JDBCOptions.builder()
                    .setDBUrl(DB_URL)
                    .setTableName(OUTPUT_TABLE1)
                    .build())
            .setTableSchema(TableSchema.builder().fields(
                    fields, new DataType[] {BIGINT(), BIGINT(), INT()}).build())
            .build());

    tEnv.sqlUpdate("INSERT INTO upsertSink SELECT cnt, COUNT(len) AS lencnt, cTag FROM" +
            " (SELECT len, COUNT(id) as cnt, cTag FROM" +
            " (SELECT id, CHAR_LENGTH(text) AS len, (CASE WHEN id > 0 THEN 1 ELSE 0 END) cTag FROM T)" +
            " GROUP BY len, cTag)" +
            " GROUP BY cnt, cTag");
    env.execute();
    check(new Row[] {
            Row.of(1, 5, 1),
            Row.of(7, 1, 1),
            Row.of(9, 1, 1)
    }, DB_URL, OUTPUT_TABLE1, fields);
}
Example #25
Source File: JavaSqlITCase.java From Flink-CEPplus with Apache License 2.0

@Test
public void testRowRegisterRowWithNames() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    List<Row> data = new ArrayList<>();
    data.add(Row.of(1, 1L, "Hi"));
    data.add(Row.of(2, 2L, "Hello"));
    data.add(Row.of(3, 2L, "Hello world"));

    TypeInformation<?>[] types = {
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.LONG_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO};
    String[] names = {"a", "b", "c"};

    RowTypeInfo typeInfo = new RowTypeInfo(types, names);
    DataStream<Row> ds = env.fromCollection(data).returns(typeInfo);

    Table in = tableEnv.fromDataStream(ds, "a,b,c");
    tableEnv.registerTable("MyTableRow", in);

    String sqlQuery = "SELECT a,c FROM MyTableRow";
    Table result = tableEnv.sqlQuery(sqlQuery);

    DataStream<Row> resultSet = tableEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,Hi");
    expected.add("2,Hello");
    expected.add("3,Hello world");

    StreamITCase.compareWithList(expected);
}
Example #26
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0

@Test
public void testHBaseLookupTableSource() throws Exception {
    if (OLD_PLANNER.equals(planner)) {
        // lookup table source is only supported in blink planner, skip for old planner
        return;
    }
    StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv, streamSettings);
    StreamITCase.clear();

    // prepare a source table
    String srcTableName = "src";
    DataStream<Row> ds = streamEnv.fromCollection(testData2).returns(testTypeInfo2);
    Table in = streamTableEnv.fromDataStream(ds, "a, b, c, proc.proctime");
    streamTableEnv.registerTable(srcTableName, in);

    Map<String, String> tableProperties = hbaseTableProperties();
    TableSource source = TableFactoryService
            .find(HBaseTableFactory.class, tableProperties)
            .createTableSource(tableProperties);
    streamTableEnv.registerTableSource("hbaseLookup", source);
    // perform a temporal table join query
    String query = "SELECT a,family1.col1, family3.col3 FROM src " +
            "JOIN hbaseLookup FOR SYSTEM_TIME AS OF src.proc as h ON src.a = h.rk";
    Table result = streamTableEnv.sqlQuery(query);

    DataStream<Row> resultSet = streamTableEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<>());
    streamEnv.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,10,Welt-1");
    expected.add("2,20,Welt-2");
    expected.add("3,30,Welt-3");
    expected.add("3,30,Welt-3");

    StreamITCase.compareWithList(expected);
}
Example #27
Source File: TableEnvironmentExample1.java From flink-learning with Apache License 2.0

public static void main(String[] args) {
    // streaming job
    StreamTableEnvironment.create(StreamExecutionEnvironment.getExecutionEnvironment());

    // batch job
    BatchTableEnvironment.create(ExecutionEnvironment.getExecutionEnvironment());

    // use EnvironmentSettings
    StreamTableEnvironment.create(
            StreamExecutionEnvironment.getExecutionEnvironment(),
            EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build());
    StreamTableEnvironment.create(
            StreamExecutionEnvironment.getExecutionEnvironment(),
            EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build());

    // use table config
    StreamTableEnvironment.create(
            StreamExecutionEnvironment.getExecutionEnvironment(),
            TableConfig.getDefault());
}
Example #28
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0

@Test
public void testHBaseLookupFunction() throws Exception {
    StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv, streamSettings);
    StreamITCase.clear();

    // prepare a source table
    DataStream<Row> ds = streamEnv.fromCollection(testData2).returns(testTypeInfo2);
    Table in = streamTableEnv.fromDataStream(ds, "a, b, c");
    streamTableEnv.registerTable("src", in);

    Map<String, String> tableProperties = hbaseTableProperties();
    TableSource source = TableFactoryService
            .find(HBaseTableFactory.class, tableProperties)
            .createTableSource(tableProperties);
    streamTableEnv.registerFunction("hbaseLookup",
            ((HBaseTableSource) source).getLookupFunction(new String[]{ROWKEY}));

    // perform a temporal table join query
    String sqlQuery = "SELECT a,family1.col1, family3.col3 FROM src, LATERAL TABLE(hbaseLookup(a))";
    Table result = streamTableEnv.sqlQuery(sqlQuery);

    DataStream<Row> resultSet = streamTableEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<>());
    streamEnv.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,10,Welt-1");
    expected.add("2,20,Welt-2");
    expected.add("3,30,Welt-3");
    expected.add("3,30,Welt-3");

    StreamITCase.compareWithList(expected);
}