Java Code Examples for org.apache.flink.table.api.TableEnvironment#registerTableSource()
The following examples show how to use org.apache.flink.table.api.TableEnvironment#registerTableSource().
You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
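Before the project examples, here is a minimal, self-contained sketch of the call itself. It is not taken from any of the projects below; the CsvTableSource, the file path, and the field names are assumed placeholders chosen only to illustrate registering a TableSource and querying it:

    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.Table;
    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.sources.CsvTableSource;

    public class RegisterTableSourceSketch {

        public static void main(String[] args) {
            // Create a batch TableEnvironment with the Blink planner, as most examples below do.
            TableEnvironment tEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build());

            // CsvTableSource is one concrete TableSource implementation;
            // the path and field names here are hypothetical placeholders.
            CsvTableSource source = CsvTableSource.builder()
                .path("/tmp/orders.csv")
                .field("id", Types.INT)
                .field("amount", Types.DOUBLE)
                .build();

            // Register the source under a name that SQL queries can reference.
            tEnv.registerTableSource("Orders", source);

            Table result = tEnv.sqlQuery("SELECT id, amount FROM Orders");
        }
    }

Note that registerTableSource() belongs to the older TableEnvironment API; later Flink releases deprecate it in favor of DDL (CREATE TABLE ... WITH (...)) and connector descriptors, which is worth keeping in mind when reading the examples below.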
Example 1
Source File: CatalogStatisticsTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testGetStatsFromCatalog() throws Exception {
    EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build();
    TableEnvironment tEnv = TableEnvironment.create(settings);
    tEnv.registerTableSource("T1", new TestTableSource(true, tableSchema));
    tEnv.registerTableSource("T2", new TestTableSource(true, tableSchema));

    Catalog catalog = tEnv.getCatalog(tEnv.getCurrentCatalog()).orElse(null);
    assertNotNull(catalog);
    catalog.alterTableStatistics(ObjectPath.fromString("default_database.T1"),
        new CatalogTableStatistics(100, 10, 1000L, 2000L), true);
    catalog.alterTableStatistics(ObjectPath.fromString("default_database.T2"),
        new CatalogTableStatistics(100000000, 1000, 1000000000L, 2000000000L), true);
    catalog.alterTableColumnStatistics(ObjectPath.fromString("default_database.T1"), createColumnStats(), true);
    catalog.alterTableColumnStatistics(ObjectPath.fromString("default_database.T2"), createColumnStats(), true);

    Table table = tEnv.sqlQuery("select * from T1, T2 where T1.s3 = T2.s3");
    String result = tEnv.explain(table);
    // T1 is broadcast side
    String expected = TableTestUtil.readFromResource("/explain/testGetStatsFromCatalog.out");
    assertEquals(expected, TableTestUtil.replaceStageId(result));
}
Example 2
Source File: BatchSQLTestProgram.java From flink with Apache License 2.0 | 6 votes |
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String outputPath = params.getRequired("outputPath");
    String sqlStatement = params.getRequired("sqlStatement");

    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance()
        .useBlinkPlanner()
        .inBatchMode()
        .build());

    tEnv.registerTableSource("table1", new GeneratorTableSource(10, 100, 60, 0));
    tEnv.registerTableSource("table2", new GeneratorTableSource(5, 0.2f, 60, 5));
    tEnv.registerTableSink("sinkTable",
        new CsvTableSink(outputPath)
            .configure(new String[]{"f0", "f1"},
                new TypeInformation[]{Types.INT, Types.SQL_TIMESTAMP}));

    tEnv.sqlUpdate(sqlStatement);

    tEnv.execute("TestSqlJob");
}
Example 3
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testTableSourceFullScan() throws Exception {
    TableEnvironment tEnv = createBatchTableEnv();
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE_1);
    hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
    hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
    hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
    hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
    hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
    hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
    tEnv.registerTableSource("hTable", hbaseTable);

    Table table = tEnv.sqlQuery("SELECT " +
        " h.family1.col1, " +
        " h.family2.col1, " +
        " h.family2.col2, " +
        " h.family3.col1, " +
        " h.family3.col2, " +
        " h.family3.col3 " +
        "FROM hTable AS h");

    List<Row> results = collectBatchResult(table);
    String expected =
        "10,Hello-1,100,1.01,false,Welt-1\n" +
        "20,Hello-2,200,2.02,true,Welt-2\n" +
        "30,Hello-3,300,3.03,false,Welt-3\n" +
        "40,null,400,4.04,true,Welt-4\n" +
        "50,Hello-5,500,5.05,false,Welt-5\n" +
        "60,Hello-6,600,6.06,true,Welt-6\n" +
        "70,Hello-7,700,7.07,false,Welt-7\n" +
        "80,null,800,8.08,true,Welt-8\n";

    TestBaseUtils.compareResultAsText(results, expected);
}
Example 4
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testTableSourceProjection() throws Exception {
    TableEnvironment tEnv = createBatchTableEnv();
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE_1);
    hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
    hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
    hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
    hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
    hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
    hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
    tEnv.registerTableSource("hTable", hbaseTable);

    Table table = tEnv.sqlQuery("SELECT " +
        " h.family1.col1, " +
        " h.family3.col1, " +
        " h.family3.col2, " +
        " h.family3.col3 " +
        "FROM hTable AS h");

    List<Row> results = collectBatchResult(table);
    String expected =
        "10,1.01,false,Welt-1\n" +
        "20,2.02,true,Welt-2\n" +
        "30,3.03,false,Welt-3\n" +
        "40,4.04,true,Welt-4\n" +
        "50,5.05,false,Welt-5\n" +
        "60,6.06,true,Welt-6\n" +
        "70,7.07,false,Welt-7\n" +
        "80,8.08,true,Welt-8\n";

    TestBaseUtils.compareResultAsText(results, expected);
}
Example 5
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testTableSourceFieldOrder() throws Exception {
    TableEnvironment tEnv = createBatchTableEnv();
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE_1);
    // shuffle order of column registration
    hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
    hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
    hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
    hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
    hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
    hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
    tEnv.registerTableSource("hTable", hbaseTable);

    Table table = tEnv.sqlQuery("SELECT * FROM hTable AS h");

    List<Row> results = collectBatchResult(table);
    String expected =
        "Hello-1,100,1.01,false,Welt-1,10\n" +
        "Hello-2,200,2.02,true,Welt-2,20\n" +
        "Hello-3,300,3.03,false,Welt-3,30\n" +
        "null,400,4.04,true,Welt-4,40\n" +
        "Hello-5,500,5.05,false,Welt-5,50\n" +
        "Hello-6,600,6.06,true,Welt-6,60\n" +
        "Hello-7,700,7.07,false,Welt-7,70\n" +
        "null,800,8.08,true,Welt-8,80\n";

    TestBaseUtils.compareResultAsText(results, expected);
}
Example 6
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testTableSourceReadAsByteArray() throws Exception {
    TableEnvironment tEnv = createBatchTableEnv();
    // fetch row2 from the table till the end
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE_1);
    hbaseTable.addColumn(FAMILY2, F2COL1, byte[].class);
    hbaseTable.addColumn(FAMILY2, F2COL2, byte[].class);
    tEnv.registerTableSource("hTable", hbaseTable);
    tEnv.registerFunction("toUTF8", new ToUTF8());
    tEnv.registerFunction("toLong", new ToLong());

    Table table = tEnv.sqlQuery(
        "SELECT " +
        " toUTF8(h.family2.col1), " +
        " toLong(h.family2.col2) " +
        "FROM hTable AS h"
    );

    List<Row> results = collectBatchResult(table);
    String expected =
        "Hello-1,100\n" +
        "Hello-2,200\n" +
        "Hello-3,300\n" +
        "null,400\n" +
        "Hello-5,500\n" +
        "Hello-6,600\n" +
        "Hello-7,700\n" +
        "null,800\n";

    TestBaseUtils.compareResultAsText(results, expected);
}
Example 7
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testTableSink() throws Exception {
    HBaseTableSchema schema = new HBaseTableSchema();
    schema.addColumn(FAMILY1, F1COL1, Integer.class);
    schema.addColumn(FAMILY2, F2COL1, String.class);
    schema.addColumn(FAMILY2, F2COL2, Long.class);
    schema.setRowKey("rk", Integer.class);
    schema.addColumn(FAMILY3, F3COL1, Double.class);
    schema.addColumn(FAMILY3, F3COL2, Boolean.class);
    schema.addColumn(FAMILY3, F3COL3, String.class);

    Map<String, String> tableProperties = new HashMap<>();
    tableProperties.put("connector.type", "hbase");
    tableProperties.put("connector.version", "1.4.3");
    tableProperties.put("connector.property-version", "1");
    tableProperties.put("connector.table-name", TEST_TABLE_2);
    tableProperties.put("connector.zookeeper.quorum", getZookeeperQuorum());
    tableProperties.put("connector.zookeeper.znode.parent", "/hbase");
    DescriptorProperties descriptorProperties = new DescriptorProperties(true);
    descriptorProperties.putTableSchema(SCHEMA, schema.convertsToTableSchema());
    descriptorProperties.putProperties(tableProperties);
    TableSink tableSink = TableFactoryService
        .find(HBaseTableFactory.class, descriptorProperties.asMap())
        .createTableSink(descriptorProperties.asMap());

    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv, streamSettings);

    DataStream<Row> ds = execEnv.fromCollection(testData1).returns(testTypeInfo1);
    tEnv.registerDataStream("src", ds);
    tEnv.registerTableSink("hbase", tableSink);

    String query = "INSERT INTO hbase SELECT ROW(f1c1), ROW(f2c1, f2c2), rowkey, ROW(f3c1, f3c2, f3c3) FROM src";
    tEnv.sqlUpdate(query);

    // wait to finish
    tEnv.execute("HBase Job");

    // start a batch scan job to verify contents in HBase table
    TableEnvironment batchTableEnv = createBatchTableEnv();
    HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE_2);
    hbaseTable.setRowKey("rowkey", Integer.class);
    hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
    hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
    hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
    hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
    hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
    hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
    batchTableEnv.registerTableSource("hTable", hbaseTable);

    Table table = batchTableEnv.sqlQuery(
        "SELECT " +
        " h.rowkey, " +
        " h.family1.col1, " +
        " h.family2.col1, " +
        " h.family2.col2, " +
        " h.family3.col1, " +
        " h.family3.col2, " +
        " h.family3.col3 " +
        "FROM hTable AS h"
    );

    List<Row> results = collectBatchResult(table);
    String expected =
        "1,10,Hello-1,100,1.01,false,Welt-1\n" +
        "2,20,Hello-2,200,2.02,true,Welt-2\n" +
        "3,30,Hello-3,300,3.03,false,Welt-3\n" +
        "4,40,,400,4.04,true,Welt-4\n" +
        "5,50,Hello-5,500,5.05,false,Welt-5\n" +
        "6,60,Hello-6,600,6.06,true,Welt-6\n" +
        "7,70,Hello-7,700,7.07,false,Welt-7\n" +
        "8,80,,800,8.08,true,Welt-8\n";

    TestBaseUtils.compareResultAsText(results, expected);
}