Java Code Examples for org.apache.flink.table.api.EnvironmentSettings#toPlannerProperties()
The following examples show how to use org.apache.flink.table.api.EnvironmentSettings#toPlannerProperties().
Each example lists its source file and the project it comes from.
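Before the project examples, here is a minimal, self-contained sketch of the call itself. It assumes the legacy pre-1.14 EnvironmentSettings builder API (newInstance(), useBlinkPlanner(), inStreamingMode()), under which toPlannerProperties() is still available; the class name PlannerPropertiesExample and the printing loop are purely illustrative and not taken from any of the projects below.

import java.util.Map;

import org.apache.flink.table.api.EnvironmentSettings;

// Illustrative class name; not part of Flink or the projects shown below.
public class PlannerPropertiesExample {

    public static void main(String[] args) {
        // Build settings for the Blink planner in streaming mode
        // (builder methods from the legacy, pre-1.14 EnvironmentSettings API).
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();

        // toPlannerProperties() exposes the planner selection as plain string
        // properties that can be used for factory discovery.
        Map<String, String> plannerProperties = settings.toPlannerProperties();
        plannerProperties.forEach((key, value) -> System.out.println(key + " = " + value));
    }
}

The examples that follow pass this same property map to ComponentFactoryService.find(PlannerFactory.class, plannerProperties) in order to discover and instantiate a matching Planner.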
Example 1
Source File: TableEnvironmentImpl.java From flink with Apache License 2.0
public static TableEnvironmentImpl create(EnvironmentSettings settings) {
    CatalogManager catalogManager = new CatalogManager(
        settings.getBuiltInCatalogName(),
        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()));

    FunctionCatalog functionCatalog = new FunctionCatalog(catalogManager);

    Map<String, String> executorProperties = settings.toExecutorProperties();
    Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
        .create(executorProperties);

    TableConfig tableConfig = new TableConfig();
    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

    return new TableEnvironmentImpl(
        catalogManager,
        tableConfig,
        executor,
        functionCatalog,
        planner,
        settings.isStreamingMode()
    );
}
Example 2
Source File: ExecutionContext.java From flink with Apache License 2.0
private static TableEnvironment createStreamTableEnvironment(
        StreamExecutionEnvironment env,
        EnvironmentSettings settings,
        TableConfig config,
        Executor executor,
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        FunctionCatalog functionCatalog) {

    final Map<String, String> plannerProperties = settings.toPlannerProperties();
    final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(plannerProperties, executor, config, functionCatalog, catalogManager);

    return new StreamTableEnvironmentImpl(
        catalogManager,
        moduleManager,
        functionCatalog,
        config,
        env,
        planner,
        executor,
        settings.isStreamingMode());
}
Example 3
Source File: StreamTableEnvironmentImpl.java From flink with Apache License 2.0
public static StreamTableEnvironment create(
        StreamExecutionEnvironment executionEnvironment,
        EnvironmentSettings settings,
        TableConfig tableConfig) {
    if (!settings.isStreamingMode()) {
        throw new TableException(
            "StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
    }

    CatalogManager catalogManager = new CatalogManager(
        settings.getBuiltInCatalogName(),
        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()));
    FunctionCatalog functionCatalog = new FunctionCatalog(catalogManager);

    Map<String, String> executorProperties = settings.toExecutorProperties();
    Executor executor = lookupExecutor(executorProperties, executionEnvironment);

    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

    return new StreamTableEnvironmentImpl(
        catalogManager,
        functionCatalog,
        tableConfig,
        executionEnvironment,
        planner,
        executor,
        settings.isStreamingMode()
    );
}
Example 4
Source File: ExecutionContext.java From flink with Apache License 2.0
private static TableEnvironment createStreamTableEnvironment(
        StreamExecutionEnvironment env,
        EnvironmentSettings settings,
        Executor executor) {

    final TableConfig config = TableConfig.getDefault();

    final CatalogManager catalogManager = new CatalogManager(
        settings.getBuiltInCatalogName(),
        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()));

    final FunctionCatalog functionCatalog = new FunctionCatalog(catalogManager);

    final Map<String, String> plannerProperties = settings.toPlannerProperties();
    final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(plannerProperties, executor, config, functionCatalog, catalogManager);

    return new StreamTableEnvironmentImpl(
        catalogManager,
        functionCatalog,
        config,
        env,
        planner,
        executor,
        settings.isStreamingMode()
    );
}
Example 5
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createJavaBlinkStreamTableEnvironment(EnvironmentSettings settings) {
    try {
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, tblConfig, blinkFunctionCatalog, catalogManager);

        Class clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class
                .forName("org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
        } else {
            clazz = Class
                .forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
        }
        Constructor constructor = clazz
            .getConstructor(
                CatalogManager.class,
                ModuleManager.class,
                FunctionCatalog.class,
                TableConfig.class,
                org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class,
                Planner.class,
                Executor.class,
                boolean.class);
        return (TableEnvironment) constructor.newInstance(catalogManager,
            moduleManager,
            blinkFunctionCatalog,
            tblConfig,
            senv.getJavaEnv(),
            planner,
            executor,
            settings.isStreamingMode());
    } catch (Exception e) {
        throw new TableException("Fail to createJavaBlinkStreamTableEnvironment", e);
    }
}
Example 6
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createJavaBlinkBatchTableEnvironment(
        EnvironmentSettings settings) {
    try {
        final Map<String, String> executorProperties = settings.toExecutorProperties();
        executor = lookupExecutor(executorProperties, senv.getJavaEnv());
        final Map<String, String> plannerProperties = settings.toPlannerProperties();
        final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, tblConfig, blinkFunctionCatalog, catalogManager);

        Class clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class
                .forName("org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
        } else {
            clazz = Class
                .forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
        }
        Constructor constructor = clazz.getConstructor(
            CatalogManager.class,
            ModuleManager.class,
            FunctionCatalog.class,
            TableConfig.class,
            StreamExecutionEnvironment.class,
            Planner.class,
            Executor.class,
            boolean.class);
        return (TableEnvironment) constructor.newInstance(
            catalogManager,
            moduleManager,
            blinkFunctionCatalog,
            tblConfig,
            senv.getJavaEnv(),
            planner,
            executor,
            settings.isStreamingMode());
    } catch (Exception e) {
        LOGGER.info(ExceptionUtils.getStackTrace(e));
        throw new TableException("Fail to createJavaBlinkBatchTableEnvironment", e);
    }
}
Example 7
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public void createPlanner(EnvironmentSettings settings) {
    Map<String, String> executorProperties = settings.toExecutorProperties();
    Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

    Map<String, String> plannerProperties = settings.toPlannerProperties();
    ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(
            plannerProperties,
            executor,
            tblConfig,
            blinkFunctionCatalog,
            catalogManager);
}
Example 8
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createScalaFlinkStreamTableEnvironment(EnvironmentSettings settings) {
    try {
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(
                plannerProperties,
                executor,
                tblConfig,
                flinkFunctionCatalog,
                catalogManager);

        Class clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class
                .forName("org.apache.flink.table.api.scala.internal.StreamTableEnvironmentImpl");
        } else {
            clazz = Class
                .forName("org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl");
        }
        Constructor constructor = clazz
            .getConstructor(
                CatalogManager.class,
                ModuleManager.class,
                FunctionCatalog.class,
                TableConfig.class,
                org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                Planner.class,
                Executor.class,
                boolean.class);
        return (TableEnvironment) constructor.newInstance(catalogManager,
            moduleManager,
            flinkFunctionCatalog,
            tblConfig,
            senv,
            planner,
            executor,
            settings.isStreamingMode());
    } catch (Exception e) {
        throw new TableException("Fail to createScalaFlinkStreamTableEnvironment", e);
    }
}
Example 9
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createJavaFlinkStreamTableEnvironment(EnvironmentSettings settings) {
    try {
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, tblConfig, flinkFunctionCatalog, catalogManager);

        Class clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class
                .forName("org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
        } else {
            clazz = Class
                .forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
        }
        Constructor constructor = clazz
            .getConstructor(
                CatalogManager.class,
                ModuleManager.class,
                FunctionCatalog.class,
                TableConfig.class,
                org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class,
                Planner.class,
                Executor.class,
                boolean.class);
        return (TableEnvironment) constructor.newInstance(catalogManager,
            moduleManager,
            flinkFunctionCatalog,
            tblConfig,
            senv.getJavaEnv(),
            planner,
            executor,
            settings.isStreamingMode());
    } catch (Exception e) {
        throw new TableException("Fail to createJavaFlinkStreamTableEnvironment", e);
    }
}
Example 10
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createScalaBlinkStreamTableEnvironment(EnvironmentSettings settings) {
    try {
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(
                plannerProperties,
                executor,
                tblConfig,
                blinkFunctionCatalog,
                catalogManager);

        Class clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class
                .forName("org.apache.flink.table.api.scala.internal.StreamTableEnvironmentImpl");
        } else {
            clazz = Class
                .forName("org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl");
        }
        Constructor constructor = clazz
            .getConstructor(
                CatalogManager.class,
                ModuleManager.class,
                FunctionCatalog.class,
                TableConfig.class,
                org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                Planner.class,
                Executor.class,
                boolean.class);
        return (TableEnvironment) constructor.newInstance(catalogManager,
            moduleManager,
            blinkFunctionCatalog,
            tblConfig,
            senv,
            planner,
            executor,
            settings.isStreamingMode());
    } catch (Exception e) {
        throw new TableException("Fail to createScalaBlinkStreamTableEnvironment", e);
    }
}
Example 11
Source File: TableEnvironmentImpl.java From flink with Apache License 2.0
public static TableEnvironmentImpl create(EnvironmentSettings settings) {
    // temporary solution until FLINK-15635 is fixed
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

    TableConfig tableConfig = new TableConfig();

    ModuleManager moduleManager = new ModuleManager();

    CatalogManager catalogManager = CatalogManager.newBuilder()
        .classLoader(classLoader)
        .config(tableConfig.getConfiguration())
        .defaultCatalog(
            settings.getBuiltInCatalogName(),
            new GenericInMemoryCatalog(
                settings.getBuiltInCatalogName(),
                settings.getBuiltInDatabaseName()))
        .build();

    FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);

    Map<String, String> executorProperties = settings.toExecutorProperties();
    Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
        .create(executorProperties);

    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(
            plannerProperties,
            executor,
            tableConfig,
            functionCatalog,
            catalogManager);

    return new TableEnvironmentImpl(
        catalogManager,
        moduleManager,
        tableConfig,
        executor,
        functionCatalog,
        planner,
        settings.isStreamingMode()
    );
}
Example 12
Source File: StreamTableEnvironmentImpl.java From flink with Apache License 2.0
public static StreamTableEnvironment create(
        StreamExecutionEnvironment executionEnvironment,
        EnvironmentSettings settings,
        TableConfig tableConfig) {
    if (!settings.isStreamingMode()) {
        throw new TableException(
            "StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
    }

    // temporary solution until FLINK-15635 is fixed
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

    ModuleManager moduleManager = new ModuleManager();

    CatalogManager catalogManager = CatalogManager.newBuilder()
        .classLoader(classLoader)
        .config(tableConfig.getConfiguration())
        .defaultCatalog(
            settings.getBuiltInCatalogName(),
            new GenericInMemoryCatalog(
                settings.getBuiltInCatalogName(),
                settings.getBuiltInDatabaseName()))
        .executionConfig(executionEnvironment.getConfig())
        .build();

    FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);

    Map<String, String> executorProperties = settings.toExecutorProperties();
    Executor executor = lookupExecutor(executorProperties, executionEnvironment);

    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

    return new StreamTableEnvironmentImpl(
        catalogManager,
        moduleManager,
        functionCatalog,
        tableConfig,
        executionEnvironment,
        planner,
        executor,
        settings.isStreamingMode()
    );
}