Java Code Examples for org.apache.flink.table.api.TableEnvironment#fromTableSource()
The following examples show how to use org.apache.flink.table.api.TableEnvironment#fromTableSource().
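Before the project examples, here is a minimal, self-contained sketch of the basic pattern they all share: turning a TableSource into a Table with fromTableSource() and querying it with SQL. It is written against the Flink 1.9/1.10 API generation, where fromTableSource() is still available; CsvTableSource is a built-in TableSource, while the class name, file path, and schema below are hypothetical placeholders.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.sources.CsvTableSource;

public class FromTableSourceSketch {

    public static void main(String[] args) {
        // Batch TableEnvironment with the Blink planner (Flink 1.9/1.10 style).
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inBatchMode()
                .build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // Any TableSource works here; CsvTableSource ships with flink-table.
        // The path and fields are placeholders.
        CsvTableSource source = CsvTableSource.builder()
                .path("/tmp/people.csv")
                .field("name", Types.STRING)
                .field("age", Types.INT)
                .build();

        // fromTableSource() creates a Table directly from the source,
        // without registering it in a catalog first.
        Table people = tableEnv.fromTableSource(source);
        tableEnv.registerTable("people", people);

        Table adults = tableEnv.sqlQuery("SELECT name FROM people WHERE age >= 18");
    }
}

The Hive connector tests below follow the same shape, except that they use CollectionTableSource, a test helper that serves an in-memory List<Row>.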
Example 1
Source File: HiveTableSinkTest.java From flink with Apache License 2.0
@Test
public void testInsertIntoNonPartitionTable() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, 0);
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnv();
    List<Row> toWrite = generateRecords(5);
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("src", src);

    tableEnv.registerCatalog("hive", hiveCatalog);
    tableEnv.sqlQuery("select * from src").insertInto("hive", "default", "dest");
    tableEnv.execute("mytest");

    verifyWrittenData(toWrite, hiveShell.executeQuery("select * from " + tblName));

    hiveCatalog.dropTable(tablePath, false);
}
Example 2
Source File: HiveTableSinkTest.java From flink with Apache License 2.0
@Test
public void testInsertIntoDynamicPartition() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, 1);
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnv();
    List<Row> toWrite = generateRecords(5);
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("src", src);

    tableEnv.registerCatalog("hive", hiveCatalog);
    tableEnv.sqlQuery("select * from src").insertInto("hive", "default", "dest");
    tableEnv.execute("mytest");

    // expect one dynamic partition per written record
    List<CatalogPartitionSpec> partitionSpecs = hiveCatalog.listPartitions(tablePath);
    assertEquals(toWrite.size(), partitionSpecs.size());

    verifyWrittenData(toWrite, hiveShell.executeQuery("select * from " + tblName));

    hiveCatalog.dropTable(tablePath, false);
}
Example 3
Source File: HiveTableSinkITCase.java From flink with Apache License 2.0
@Test
public void testInsertIntoNonPartitionTable() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    RowTypeInfo rowTypeInfo = createHiveDestTable(dbName, tblName, 0);
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
    List<Row> toWrite = generateRecords(5);
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("src", src);

    tableEnv.registerCatalog("hive", hiveCatalog);
    TableEnvUtil.execInsertTableAndWaitResult(tableEnv.sqlQuery("select * from src"), "hive.`default`.dest");
    verifyWrittenData(toWrite, hiveShell.executeQuery("select * from " + tblName));

    hiveCatalog.dropTable(tablePath, false);
}
Example 4
Source File: HiveTableSinkTest.java From flink with Apache License 2.0
@Test
public void testWriteNestedComplexType() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    // nested complex types
    TableSchema.Builder builder = new TableSchema.Builder();
    // array of rows
    builder.fields(new String[]{"a"}, new DataType[]{DataTypes.ARRAY(
            DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING())))});
    RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, builder.build(), 0);
    Row row = new Row(rowTypeInfo.getArity());
    Object[] array = new Object[3];
    row.setField(0, array);
    for (int i = 0; i < array.length; i++) {
        Row struct = new Row(2);
        struct.setField(0, 1 + i);
        struct.setField(1, String.valueOf((char) ('a' + i)));
        array[i] = struct;
    }
    List<Row> toWrite = new ArrayList<>();
    toWrite.add(row);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnv();
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("nestedSrc", src);
    tableEnv.registerCatalog("hive", hiveCatalog);
    tableEnv.sqlQuery("select * from nestedSrc").insertInto("hive", "default", "dest");
    tableEnv.execute("mytest");

    List<String> result = hiveShell.executeQuery("select * from " + tblName);
    assertEquals(1, result.size());
    assertEquals("[{\"f1\":1,\"f2\":\"a\"},{\"f1\":2,\"f2\":\"b\"},{\"f1\":3,\"f2\":\"c\"}]", result.get(0));

    hiveCatalog.dropTable(tablePath, false);
}
Example 5
Source File: HiveTableSinkTest.java From flink with Apache License 2.0
@Test
public void testInsertIntoStaticPartition() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, 1);
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnv();
    List<Row> toWrite = generateRecords(1);
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("src", src);

    Map<String, String> partSpec = new HashMap<>();
    partSpec.put("s", "a");

    CatalogTable table = (CatalogTable) hiveCatalog.getTable(tablePath);
    HiveTableSink hiveTableSink = new HiveTableSink(new JobConf(hiveConf), tablePath, table);
    hiveTableSink.setStaticPartition(partSpec);
    tableEnv.registerTableSink("destSink", hiveTableSink);
    tableEnv.sqlQuery("select * from src").insertInto("destSink");
    tableEnv.execute("mytest");

    // make sure new partition is created
    assertEquals(toWrite.size(), hiveCatalog.listPartitions(tablePath).size());
    verifyWrittenData(toWrite, hiveShell.executeQuery("select * from " + tblName));

    hiveCatalog.dropTable(tablePath, false);
}
Example 6
Source File: HiveTableSinkTest.java From flink with Apache License 2.0
@Test
public void testInsertOverwrite() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, 0);
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnv();

    // write some data and verify
    List<Row> toWrite = generateRecords(5);
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("src", src);

    CatalogTable table = (CatalogTable) hiveCatalog.getTable(tablePath);
    tableEnv.registerTableSink("destSink", new HiveTableSink(new JobConf(hiveConf), tablePath, table));
    tableEnv.sqlQuery("select * from src").insertInto("destSink");
    tableEnv.execute("mytest");

    verifyWrittenData(toWrite, hiveShell.executeQuery("select * from " + tblName));

    // write some data to overwrite existing data and verify
    toWrite = generateRecords(3);
    Table src1 = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("src1", src1);

    HiveTableSink sink = new HiveTableSink(new JobConf(hiveConf), tablePath, table);
    sink.setOverwrite(true);
    tableEnv.registerTableSink("destSink1", sink);
    tableEnv.sqlQuery("select * from src1").insertInto("destSink1");
    tableEnv.execute("mytest");

    verifyWrittenData(toWrite, hiveShell.executeQuery("select * from " + tblName));

    hiveCatalog.dropTable(tablePath, false);
}
Example 7
Source File: HiveTableSinkITCase.java From flink with Apache License 2.0
@Test
public void testWriteNestedComplexType() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    // nested complex types
    TableSchema.Builder builder = new TableSchema.Builder();
    // array of rows
    builder.fields(new String[]{"a"}, new DataType[]{DataTypes.ARRAY(
            DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING())))});
    RowTypeInfo rowTypeInfo = createHiveDestTable(dbName, tblName, builder.build(), 0);
    Row row = new Row(rowTypeInfo.getArity());
    Object[] array = new Object[3];
    row.setField(0, array);
    for (int i = 0; i < array.length; i++) {
        Row struct = new Row(2);
        struct.setField(0, 1 + i);
        struct.setField(1, String.valueOf((char) ('a' + i)));
        array[i] = struct;
    }
    List<Row> toWrite = new ArrayList<>();
    toWrite.add(row);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("nestedSrc", src);
    tableEnv.registerCatalog("hive", hiveCatalog);
    TableEnvUtil.execInsertTableAndWaitResult(tableEnv.sqlQuery("select * from nestedSrc"), "hive.`default`.dest");

    List<String> result = hiveShell.executeQuery("select * from " + tblName);
    assertEquals(1, result.size());
    assertEquals("[{\"f1\":1,\"f2\":\"a\"},{\"f1\":2,\"f2\":\"b\"},{\"f1\":3,\"f2\":\"c\"}]", result.get(0));

    hiveCatalog.dropTable(tablePath, false);
}
Example 8
Source File: HiveTableSinkTest.java From flink with Apache License 2.0
@Test
public void testWriteComplexType() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    TableSchema.Builder builder = new TableSchema.Builder();
    builder.fields(new String[]{"a", "m", "s"}, new DataType[]{
            DataTypes.ARRAY(DataTypes.INT()),
            DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
            DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING()))});

    RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, builder.build(), 0);
    List<Row> toWrite = new ArrayList<>();
    Row row = new Row(rowTypeInfo.getArity());
    Object[] array = new Object[]{1, 2, 3};
    Map<Integer, String> map = new HashMap<Integer, String>() {{
        put(1, "a");
        put(2, "b");
    }};
    Row struct = new Row(2);
    struct.setField(0, 3);
    struct.setField(1, "c");
    row.setField(0, array);
    row.setField(1, map);
    row.setField(2, struct);
    toWrite.add(row);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnv();
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("complexSrc", src);
    tableEnv.registerCatalog("hive", hiveCatalog);
    tableEnv.sqlQuery("select * from complexSrc").insertInto("hive", "default", "dest");
    tableEnv.execute("mytest");

    List<String> result = hiveShell.executeQuery("select * from " + tblName);
    assertEquals(1, result.size());
    assertEquals("[1,2,3]\t{1:\"a\",2:\"b\"}\t{\"f1\":3,\"f2\":\"c\"}", result.get(0));

    hiveCatalog.dropTable(tablePath, false);
}
Example 9
Source File: HiveTableSinkITCase.java From flink with Apache License 2.0
@Test
public void testWriteComplexType() throws Exception {
    String dbName = "default";
    String tblName = "dest";
    ObjectPath tablePath = new ObjectPath(dbName, tblName);

    TableSchema.Builder builder = new TableSchema.Builder();
    builder.fields(new String[]{"a", "m", "s"}, new DataType[]{
            DataTypes.ARRAY(DataTypes.INT()),
            DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
            DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING()))});

    RowTypeInfo rowTypeInfo = createHiveDestTable(dbName, tblName, builder.build(), 0);
    List<Row> toWrite = new ArrayList<>();
    Row row = new Row(rowTypeInfo.getArity());
    Object[] array = new Object[]{1, 2, 3};
    Map<Integer, String> map = new HashMap<Integer, String>() {{
        put(1, "a");
        put(2, "b");
    }};
    Row struct = new Row(2);
    struct.setField(0, 3);
    struct.setField(1, "c");
    row.setField(0, array);
    row.setField(1, map);
    row.setField(2, struct);
    toWrite.add(row);

    TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
    Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
    tableEnv.registerTable("complexSrc", src);
    tableEnv.registerCatalog("hive", hiveCatalog);
    TableEnvUtil.execInsertTableAndWaitResult(tableEnv.sqlQuery("select * from complexSrc"), "hive.`default`.dest");

    List<String> result = hiveShell.executeQuery("select * from " + tblName);
    assertEquals(1, result.size());
    assertEquals("[1,2,3]\t{1:\"a\",2:\"b\"}\t{\"f1\":3,\"f2\":\"c\"}", result.get(0));

    hiveCatalog.dropTable(tablePath, false);
}