Java Code Examples for org.apache.flink.table.api.TableEnvironment#sqlUpdate()
The following examples show how to use org.apache.flink.table.api.TableEnvironment#sqlUpdate(). You can go to the original project or source file by following the links above each example.
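Before the individual examples, a minimal sketch of the pattern they all follow may help. In the Flink versions these examples target, sqlUpdate() applies DDL statements eagerly but only buffers INSERT statements; the buffered statements are compiled and submitted as a job when TableEnvironment#execute() is called. (sqlUpdate() was deprecated in Flink 1.11 in favor of executeSql().) The sketch below is an illustration, not code from any of the projects: the table names src and dest are hypothetical, and the COLLECTION connector is the test connector that appears in Example 7, so running this as-is would require that test factory on the classpath.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class SqlUpdateSketch {

    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build());

        // DDL takes effect in the catalog as soon as sqlUpdate() returns
        // ('connector' = 'COLLECTION' is the test connector from Example 7, an assumption here)
        tEnv.sqlUpdate("create table src (x int) with ('connector' = 'COLLECTION')");
        tEnv.sqlUpdate("create table dest (x int) with ('connector' = 'COLLECTION')");

        // the INSERT is only buffered here ...
        tEnv.sqlUpdate("insert into dest select x from src");

        // ... and runs as a single job when execute() is called
        tEnv.execute("sqlUpdate sketch");
    }
}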
Example 1
Source File: TableEnvHiveConnectorTest.java From flink with Apache License 2.0
@Test
public void testDefaultPartitionName() throws Exception {
    hiveShell.execute("create database db1");
    hiveShell.execute("create table db1.src (x int, y int)");
    hiveShell.execute("create table db1.part (x int) partitioned by (y int)");
    hiveShell.insertInto("db1", "src").addRow(1, 1).addRow(2, null).commit();
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    // test generating partitions with default name
    tableEnv.sqlUpdate("insert into db1.part select * from db1.src");
    tableEnv.execute("mytest");
    HiveConf hiveConf = hiveShell.getHiveConf();
    String defaultPartName = hiveConf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
    Table hiveTable = hmsClient.getTable("db1", "part");
    Path defaultPartPath = new Path(hiveTable.getSd().getLocation(), "y=" + defaultPartName);
    FileSystem fs = defaultPartPath.getFileSystem(hiveConf);
    assertTrue(fs.exists(defaultPartPath));
    // TODO: test reading from flink when https://issues.apache.org/jira/browse/FLINK-13279 is fixed
    assertEquals(Arrays.asList("1\t1", "2\tNULL"), hiveShell.executeQuery("select * from db1.part"));
    hiveShell.execute("drop database db1 cascade");
}
Example 2
Source File: TableEnvHiveConnectorTest.java From flink with Apache License 2.0
@Test
public void testDecimal() throws Exception {
    hiveShell.execute("create database db1");
    try {
        hiveShell.execute("create table db1.src1 (x decimal(10,2))");
        hiveShell.execute("create table db1.src2 (x decimal(10,2))");
        hiveShell.execute("create table db1.dest (x decimal(10,2))");
        // populate src1 from Hive
        hiveShell.execute("insert into db1.src1 values (1.0),(2.12),(5.123),(5.456),(123456789.12)");
        TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
        // populate src2 with same data from Flink
        tableEnv.sqlUpdate("insert into db1.src2 values (cast(1.0 as decimal(10,2))), (cast(2.12 as decimal(10,2))), " +
                "(cast(5.123 as decimal(10,2))), (cast(5.456 as decimal(10,2))), (cast(123456789.12 as decimal(10,2)))");
        tableEnv.execute("test1");
        // verify src1 and src2 contain same data
        verifyHiveQueryResult("select * from db1.src2", hiveShell.executeQuery("select * from db1.src1"));
        // populate dest with src1 from Flink -- to test reading decimal type from Hive
        tableEnv.sqlUpdate("insert into db1.dest select * from db1.src1");
        tableEnv.execute("test2");
        verifyHiveQueryResult("select * from db1.dest", hiveShell.executeQuery("select * from db1.src1"));
    } finally {
        hiveShell.execute("drop database db1 cascade");
    }
}
Example 3
Source File: BatchSQLTestProgram.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String outputPath = params.getRequired("outputPath");
    String sqlStatement = params.getRequired("sqlStatement");

    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance()
        .useBlinkPlanner()
        .inBatchMode()
        .build());

    tEnv.registerTableSource("table1", new GeneratorTableSource(10, 100, 60, 0));
    tEnv.registerTableSource("table2", new GeneratorTableSource(5, 0.2f, 60, 5));
    tEnv.registerTableSink("sinkTable",
        new CsvTableSink(outputPath)
            .configure(new String[]{"f0", "f1"}, new TypeInformation[]{Types.INT, Types.SQL_TIMESTAMP}));

    tEnv.sqlUpdate(sqlStatement);
    tEnv.execute("TestSqlJob");
}
Example 4
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testDropTemporarySystemFunction() {
    TableEnvironment tableEnv = getTableEnvironment();

    String ddl1 = "create temporary system function f5" +
        " as '" + TEST_FUNCTION + "'";

    String ddl2 = "drop temporary system function f5";

    String ddl3 = "drop temporary system function if exists f5";

    tableEnv.sqlUpdate(ddl1);
    tableEnv.sqlUpdate(ddl2);
    // dropping with IF EXISTS succeeds even though f5 is already gone
    tableEnv.sqlUpdate(ddl3);

    // dropping again without IF EXISTS should fail
    try {
        tableEnv.sqlUpdate(ddl2);
    } catch (Exception e) {
        assertEquals(
            "Could not drop temporary system function. A function named 'f5' doesn't exist.",
            e.getMessage());
    }
}
Example 5
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testAlterFunction() throws Exception {
    TableEnvironment tableEnv = getTableEnvironment();
    String create = "create function f3 as 'org.apache.flink.function.TestFunction'";
    String alter = "alter function f3 as 'org.apache.flink.function.TestFunction2'";

    ObjectPath objectPath = new ObjectPath("default_database", "f3");
    assertTrue(tableEnv.getCatalog("default_catalog").isPresent());
    Catalog catalog = tableEnv.getCatalog("default_catalog").get();
    tableEnv.sqlUpdate(create);
    CatalogFunction beforeUpdate = catalog.getFunction(objectPath);
    assertEquals("org.apache.flink.function.TestFunction", beforeUpdate.getClassName());

    tableEnv.sqlUpdate(alter);
    CatalogFunction afterUpdate = catalog.getFunction(objectPath);
    assertEquals("org.apache.flink.function.TestFunction2", afterUpdate.getClassName());
}
Example 6
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testUserDefinedTemporarySystemFunction() throws Exception {
    TableEnvironment tableEnv = getTableEnvironment();
    String functionDDL = "create temporary system function addOne as " +
        "'" + TEST_FUNCTION + "'";

    String dropFunctionDDL = "drop temporary system function addOne";
    testUserDefinedCatalogFunction(tableEnv, functionDDL);
    // delete the function
    tableEnv.sqlUpdate(dropFunctionDDL);
}
Example 7
Source File: FunctionITCase.java From flink with Apache License 2.0
private void testUserDefinedCatalogFunction(TableEnvironment tableEnv, String createFunctionDDL) throws Exception {
    // source data is constructed so that c = addOne(a, 1) for every row
    List<Row> sourceData = Arrays.asList(
        Row.of(1, "1000", 2),
        Row.of(2, "1", 3),
        Row.of(3, "2000", 4),
        Row.of(1, "2", 2),
        Row.of(2, "3000", 3)
    );

    // back the source and sink tables with the in-memory test collection factory
    TestCollectionTableFactory.reset();
    TestCollectionTableFactory.initData(sourceData, new ArrayList<>(), -1);

    String sourceDDL = "create table t1(a int, b varchar, c int) with ('connector' = 'COLLECTION')";
    String sinkDDL = "create table t2(a int, b varchar, c int) with ('connector' = 'COLLECTION')";
    String query = " insert into t2 select t1.a, t1.b, addOne(t1.a, 1) as c from t1";

    tableEnv.sqlUpdate(sourceDDL);
    tableEnv.sqlUpdate(sinkDDL);
    tableEnv.sqlUpdate(createFunctionDDL);
    tableEnv.sqlUpdate(query);
    tableEnv.execute("Test job");

    // rows written to the COLLECTION sink should match the source data
    Row[] result = TestCollectionTableFactory.RESULT().toArray(new Row[0]);
    Row[] expected = sourceData.toArray(new Row[0]);
    assertArrayEquals(expected, result);

    tableEnv.sqlUpdate("drop table t1");
    tableEnv.sqlUpdate("drop table t2");
}
Example 8
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testUserDefinedRegularCatalogFunction() throws Exception {
    TableEnvironment tableEnv = getTableEnvironment();
    String functionDDL = "create function addOne as " +
        "'" + TEST_FUNCTION + "'";

    String dropFunctionDDL = "drop function addOne";
    testUserDefinedCatalogFunction(tableEnv, functionDDL);
    // delete the function
    tableEnv.sqlUpdate(dropFunctionDDL);
}
Example 9
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testCreateDropTemporaryCatalogFunctionsWithDifferentIdentifier() {
    TableEnvironment tableEnv = getTableEnvironment();
    String createNoCatalogDB = "create temporary function f4" +
        " as '" + TEST_FUNCTION + "'";

    String dropNoCatalogDB = "drop temporary function f4";

    tableEnv.sqlUpdate(createNoCatalogDB);
    tableEnv.sqlUpdate(dropNoCatalogDB);

    String createNonExistsCatalog = "create temporary function catalog1.default_database.f4" +
        " as '" + TEST_FUNCTION + "'";

    String dropNonExistsCatalog = "drop temporary function catalog1.default_database.f4";

    tableEnv.sqlUpdate(createNonExistsCatalog);
    tableEnv.sqlUpdate(dropNonExistsCatalog);

    String createNonExistsDB = "create temporary function default_catalog.db1.f4" +
        " as '" + TEST_FUNCTION + "'";

    String dropNonExistsDB = "drop temporary function default_catalog.db1.f4";

    tableEnv.sqlUpdate(createNonExistsDB);
    tableEnv.sqlUpdate(dropNonExistsDB);
}
Example 10
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testAlterTemporarySystemFunction() {
    TableEnvironment tableEnv = getTableEnvironment();
    String alterTemporary = "ALTER TEMPORARY SYSTEM FUNCTION default_catalog.default_database.f4" +
        " as 'org.apache.flink.function.TestFunction'";

    // altering a temporary system function is not supported and must fail
    try {
        tableEnv.sqlUpdate(alterTemporary);
        fail();
    } catch (Exception e) {
        assertEquals("Alter temporary system function is not supported", e.getMessage());
    }
}
Example 11
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testAlterTemporaryCatalogFunction() {
    TableEnvironment tableEnv = getTableEnvironment();
    String alterTemporary = "ALTER TEMPORARY FUNCTION default_catalog.default_database.f4" +
        " as 'org.apache.flink.function.TestFunction'";

    // altering a temporary catalog function is not supported and must fail
    try {
        tableEnv.sqlUpdate(alterTemporary);
        fail();
    } catch (Exception e) {
        assertEquals("Alter temporary catalog function is not supported", e.getMessage());
    }
}
Example 12
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testCreateTemporarySystemFunction() {
    TableEnvironment tableEnv = getTableEnvironment();
    String ddl1 = "create temporary system function f5" +
        " as '" + TEST_FUNCTION + "'";

    // f5 already exists, so IF NOT EXISTS turns the second CREATE into a no-op
    String ddl2 = "create temporary system function if not exists f5" +
        " as 'org.apache.flink.table.runtime.stream.sql.FunctionITCase$TestUDF'";

    String ddl3 = "drop temporary system function f5";

    tableEnv.sqlUpdate(ddl1);
    tableEnv.sqlUpdate(ddl2);
    tableEnv.sqlUpdate(ddl3);
}
Example 13
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testCreateFunctionDBNotExists() {
    TableEnvironment tableEnv = getTableEnvironment();
    String ddl1 = "create function default_catalog.database1.f3 as 'org.apache.flink.function.TestFunction'";

    try {
        tableEnv.sqlUpdate(ddl1);
    } catch (Exception e) {
        assertEquals(e.getMessage(), "Could not execute CREATE CATALOG FUNCTION:" +
            " (catalogFunction: [Optional[This is a user-defined function]], identifier:" +
            " [`default_catalog`.`database1`.`f3`], ignoreIfExists: [false], isTemporary: [false])");
    }
}
Example 14
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testCreateFunctionCatalogNotExists() {
    TableEnvironment tableEnv = getTableEnvironment();
    String ddl1 = "create function catalog1.database1.f3 as 'org.apache.flink.function.TestFunction'";

    try {
        tableEnv.sqlUpdate(ddl1);
    } catch (Exception e) {
        assertEquals("Catalog catalog1 does not exist", e.getMessage());
    }
}
Example 15
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testCreateFunctionWithoutCatalogIdentifier() {
    TableEnvironment tableEnv = getTableEnvironment();
    String ddl1 = "create function default_database.f3 as" +
        " 'org.apache.flink.function.TestFunction'";
    tableEnv.sqlUpdate(ddl1);
    assertTrue(Arrays.asList(tableEnv.listFunctions()).contains("f3"));

    tableEnv.sqlUpdate("DROP FUNCTION IF EXISTS default_catalog.default_database.f3");
    assertFalse(Arrays.asList(tableEnv.listFunctions()).contains("f3"));
}
Example 16
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testCreateFunctionWithFullPath() {
    TableEnvironment tableEnv = getTableEnvironment();
    String ddl1 = "create function default_catalog.default_database.f2 as" +
        " 'org.apache.flink.function.TestFunction'";
    tableEnv.sqlUpdate(ddl1);
    assertTrue(Arrays.asList(tableEnv.listFunctions()).contains("f2"));

    tableEnv.sqlUpdate("DROP FUNCTION IF EXISTS default_catalog.default_database.f2");
    assertFalse(Arrays.asList(tableEnv.listFunctions()).contains("f2"));
}
Example 17
Source File: FunctionITCase.java From flink with Apache License 2.0
@Test
public void testCreateCatalogFunctionInDefaultCatalog() {
    TableEnvironment tableEnv = getTableEnvironment();
    String ddl1 = "create function f1 as 'org.apache.flink.function.TestFunction'";
    tableEnv.sqlUpdate(ddl1);
    assertTrue(Arrays.asList(tableEnv.listFunctions()).contains("f1"));

    tableEnv.sqlUpdate("DROP FUNCTION IF EXISTS default_catalog.default_database.f1");
    assertFalse(Arrays.asList(tableEnv.listFunctions()).contains("f1"));
}
Example 18
Source File: TableEnvHiveConnectorTest.java From flink with Apache License 2.0
private void readWriteFormat(String format) throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();

    hiveShell.execute("create database db1");

    // create source and dest tables
    String suffix;
    if (format.equals("csv")) {
        suffix = "row format serde 'org.apache.hadoop.hive.serde2.OpenCSVSerde'";
    } else {
        suffix = "stored as " + format;
    }
    hiveShell.execute("create table db1.src (i int,s string) " + suffix);
    hiveShell.execute("create table db1.dest (i int,s string) " + suffix);

    // prepare source data with Hive
    hiveShell.execute("insert into db1.src values (1,'a'),(2,'b')");

    // populate dest table with source table
    tableEnv.sqlUpdate("insert into db1.dest select * from db1.src");
    tableEnv.execute("test_" + format);

    // verify data on hive side
    verifyHiveQueryResult("select * from db1.dest", Arrays.asList("1\ta", "2\tb"));

    hiveShell.execute("drop database db1 cascade");
}
Example 19
Source File: TableEnvHiveConnectorTest.java From flink with Apache License 2.0
@Test
public void testGetNonExistingFunction() throws Exception {
    hiveShell.execute("create database db1");
    hiveShell.execute("create table db1.src (d double, s string)");
    hiveShell.execute("create table db1.dest (x bigint)");

    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();

    // just make sure the query runs through, no need to verify result
    tableEnv.sqlUpdate("insert into db1.dest select count(d) from db1.src");
    tableEnv.execute("test");

    hiveShell.execute("drop database db1 cascade");
}
Example 20
Source File: SqlConnect.java From flink-simple-tutorial with Apache License 2.0
public static void main(String[] args) throws Exception {
    EnvironmentSettings settings = EnvironmentSettings.newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build();
    // Create a TableEnvironment that uses the Blink planner and runs in streaming mode
    TableEnvironment tEnv = TableEnvironment.create(settings);

    String kafkaSourceSql = "CREATE TABLE log (\n" +
        " t INT, \n" +
        " user_name VARCHAR,\n" +
        " cnt INT\n" +
        ") WITH (\n" +
        " 'connector.type' = 'kafka',\n" +
        " 'connector.version' = 'universal',\n" +
        " 'connector.topic' = 'flink',\n" +
        " 'connector.startup-mode' = 'latest-offset',\n" +
        " 'connector.properties.0.key' = 'group.id',\n" +
        " 'connector.properties.0.value' = 'testGroup',\n" +
        " 'connector.properties.1.key' = 'bootstrap.servers',\n" +
        " 'connector.properties.1.value' = '192.168.56.103:9092',\n" +
        " 'connector.specific-offsets.0.partition' = '0',\n" +
        " 'connector.specific-offsets.0.offset' = '0',\n" +
        " 'update-mode' = 'append',\n" +
        " 'format.type' = 'json',\n" +
        " 'format.derive-schema' = 'true'\n" +
        ")";

    String mysqlSinkSql = "CREATE TABLE sink (\n" +
        " t INT,\n" +
        " user_name VARCHAR,\n" +
        " total INT\n" +
        ") WITH (\n" +
        " 'connector.type' = 'jdbc',\n" +
        " 'connector.url' = 'jdbc:mysql://192.168.56.103:3306/flink',\n" +
        " 'connector.table' = 'log',\n" +
        " 'connector.username' = 'root',\n" +
        " 'connector.password' = '123456',\n" +
        " 'connector.write.flush.max-rows' = '1'\n" +
        ")";

    // 1. Connect to Kafka and create the source table
    tEnv.sqlUpdate(kafkaSourceSql);
    // 2. Define the sink table to write to
    tEnv.sqlUpdate(mysqlSinkSql);
    // 3. Define the actual DML operation
    tEnv.sqlUpdate("INSERT INTO sink " +
        "SELECT * from log where cnt=100");
    tEnv.execute("SQL Job");
}