org.apache.flink.table.catalog.CatalogManager Java Examples
The following examples show how to use
org.apache.flink.table.catalog.CatalogManager.
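Most of the snippets below obtain a CatalogManager either directly through its builder (Examples #14 and #20) or through a test helper such as CatalogManagerMocks.createEmptyCatalogManager(). Before diving into the examples, here is a minimal, self-contained sketch of the builder path, assembled from the Flink 1.11-era API used in those examples; the class name CatalogManagerSketch and the catalog/database names are placeholders, not part of any example below.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

public class CatalogManagerSketch {

    // Builds a standalone CatalogManager backed by an in-memory catalog,
    // mirroring the builder chain used in Examples #14 and #20.
    public static CatalogManager build() {
        String defaultCatalog = "default_catalog";
        return CatalogManager.newBuilder()
            .classLoader(CatalogManagerSketch.class.getClassLoader())
            .config(new Configuration())
            .defaultCatalog(
                defaultCatalog,
                new GenericInMemoryCatalog(defaultCatalog, "default_database"))
            .executionConfig(new ExecutionConfig())
            .build();
    }
}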
Example #1
Source File: TableEnvironmentImpl.java From flink with Apache License 2.0
protected TableEnvironmentImpl(
        CatalogManager catalogManager,
        TableConfig tableConfig,
        Executor executor,
        FunctionCatalog functionCatalog,
        Planner planner,
        boolean isStreamingMode) {
    this.catalogManager = catalogManager;
    this.execEnv = executor;
    this.tableConfig = tableConfig;
    this.functionCatalog = functionCatalog;
    this.planner = planner;
    this.operationTreeBuilder = OperationTreeBuilder.create(
        functionCatalog,
        path -> {
            Optional<CatalogQueryOperation> catalogTableOperation = scanInternal(path);
            return catalogTableOperation.map(tableOperation ->
                new TableReferenceExpression(path, tableOperation));
        },
        isStreamingMode
    );
}
Example #2
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvFactory(
        FlinkVersion flinkVersion,
        FlinkShims flinkShims,
        org.apache.flink.api.scala.ExecutionEnvironment env,
        org.apache.flink.streaming.api.scala.StreamExecutionEnvironment senv,
        TableConfig tblConfig,
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        FunctionCatalog flinkFunctionCatalog,
        FunctionCatalog blinkFunctionCatalog) {
    this.flinkVersion = flinkVersion;
    this.flinkShims = flinkShims;
    this.benv = env;
    this.senv = senv;
    this.tblConfig = tblConfig;
    this.catalogManager = catalogManager;
    this.moduleManager = moduleManager;
    this.flinkFunctionCatalog = flinkFunctionCatalog;
    this.blinkFunctionCatalog = blinkFunctionCatalog;
}
Example #3
Source File: StreamTableEnvironmentImplTest.java From flink with Apache License 2.0
private StreamTableEnvironmentImpl getStreamTableEnvironment(
        StreamExecutionEnvironment env,
        DataStreamSource<Integer> elements) {
    TableConfig config = new TableConfig();
    CatalogManager catalogManager = CatalogManagerMocks.createEmptyCatalogManager();
    ModuleManager moduleManager = new ModuleManager();
    return new StreamTableEnvironmentImpl(
        catalogManager,
        moduleManager,
        new FunctionCatalog(config, catalogManager, moduleManager),
        config,
        env,
        new TestPlanner(elements.getTransformation()),
        new ExecutorMock(),
        true);
}
Example #4
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createScalaFlinkBatchTableEnvironment() {
    try {
        Class clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class.forName(
                "org.apache.flink.table.api.scala.internal.BatchTableEnvironmentImpl");
        } else {
            clazz = Class.forName(
                "org.apache.flink.table.api.bridge.scala.internal.BatchTableEnvironmentImpl");
        }
        Constructor constructor = clazz.getConstructor(
            org.apache.flink.api.scala.ExecutionEnvironment.class,
            TableConfig.class,
            CatalogManager.class,
            ModuleManager.class);
        return (TableEnvironment) constructor.newInstance(benv, tblConfig, catalogManager, moduleManager);
    } catch (Exception e) {
        throw new TableException("Fail to createScalaFlinkBatchTableEnvironment", e);
    }
}
Example #5
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createJavaFlinkBatchTableEnvironment() {
    try {
        Class<?> clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class.forName(
                "org.apache.flink.table.api.java.internal.BatchTableEnvironmentImpl");
        } else {
            clazz = Class.forName(
                "org.apache.flink.table.api.bridge.java.internal.BatchTableEnvironmentImpl");
        }
        Constructor con = clazz.getConstructor(
            ExecutionEnvironment.class,
            TableConfig.class,
            CatalogManager.class,
            ModuleManager.class);
        return (TableEnvironment) con.newInstance(
            benv.getJavaEnv(),
            tblConfig,
            catalogManager,
            moduleManager);
    } catch (Throwable t) {
        throw new TableException("Create BatchTableEnvironment failed.", t);
    }
}
Example #6
Source File: SqlCreateTableConverter.java From flink with Apache License 2.0
private CatalogTable lookupLikeSourceTable(SqlTableLike sqlTableLike) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(
        sqlTableLike.getSourceTable().toString());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    CatalogManager.TableLookupResult lookupResult = catalogManager.getTable(identifier)
        .orElseThrow(() -> new ValidationException(String.format(
            "Source table '%s' of the LIKE clause not found in the catalog, at %s",
            identifier,
            sqlTableLike.getSourceTable().getParserPosition())));
    if (!(lookupResult.getTable() instanceof CatalogTable)) {
        throw new ValidationException(String.format(
            "Source table '%s' of the LIKE clause can not be a VIEW, at %s",
            identifier,
            sqlTableLike.getSourceTable().getParserPosition()));
    }
    return (CatalogTable) lookupResult.getTable();
}
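This lookup runs while a CREATE TABLE ... LIKE statement is being converted. A hedged sketch of DDL that would exercise it, using the sqlUpdate API of that Flink era; the table and column names are placeholders, and base_table is assumed to already be registered as a table (not a view):

// Hypothetical DDL resolved by lookupLikeSourceTable; if base_table were a
// view, the ValidationException in the method above would be thrown.
tableEnv.sqlUpdate(
    "CREATE TABLE derived_table (" +
    "  extra_field STRING" +
    ") WITH (" +
    "  'connector' = 'kafka'" +
    ") LIKE base_table");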
Example #7
Source File: FlinkCalciteCatalogReaderTest.java From flink with Apache License 2.0
@Test
public void testGetFlinkPreparingTableBase() {
    // Mock CatalogSchemaTable.
    TableSchema schema = TableSchema.builder().build();
    CatalogSchemaTable mockTable = new CatalogSchemaTable(
        ObjectIdentifier.of("a", "b", "c"),
        CatalogManager.TableLookupResult.permanent(
            ConnectorCatalogTable.source(new TestTableSource(true, schema), true),
            schema),
        FlinkStatistic.UNKNOWN(),
        null,
        true);
    rootSchemaPlus.add(tableMockName, mockTable);
    Prepare.PreparingTable preparingTable = catalogReader
        .getTable(Collections.singletonList(tableMockName));
    assertTrue(preparingTable instanceof FlinkPreparingTableBase);
}
Example #8
Source File: ExecutionContext.java From flink with Apache License 2.0
private static TableEnvironment createStreamTableEnvironment(
        StreamExecutionEnvironment env,
        EnvironmentSettings settings,
        TableConfig config,
        Executor executor,
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        FunctionCatalog functionCatalog) {
    final Map<String, String> plannerProperties = settings.toPlannerProperties();
    final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(plannerProperties, executor, config, functionCatalog, catalogManager);
    return new StreamTableEnvironmentImpl(
        catalogManager,
        moduleManager,
        functionCatalog,
        config,
        env,
        planner,
        executor,
        settings.isStreamingMode());
}
Example #9
Source File: TableEnvironmentImpl.java From flink with Apache License 2.0
public static TableEnvironmentImpl create(EnvironmentSettings settings) {
    CatalogManager catalogManager = new CatalogManager(
        settings.getBuiltInCatalogName(),
        new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()));
    FunctionCatalog functionCatalog = new FunctionCatalog(catalogManager);

    Map<String, String> executorProperties = settings.toExecutorProperties();
    Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
        .create(executorProperties);

    TableConfig tableConfig = new TableConfig();
    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
        .create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

    return new TableEnvironmentImpl(
        catalogManager,
        tableConfig,
        executor,
        functionCatalog,
        planner,
        settings.isStreamingMode()
    );
}
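A hypothetical call site for this factory method, using the EnvironmentSettings builder; the planner and mode choices below are assumptions matching this legacy-planner code path, not part of the example itself:

// Hypothetical usage of TableEnvironmentImpl.create(...):
EnvironmentSettings settings = EnvironmentSettings.newInstance()
    .useOldPlanner()      // assumption: the legacy planner this code path belongs to
    .inStreamingMode()
    .build();
TableEnvironmentImpl tableEnv = TableEnvironmentImpl.create(settings);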
Example #10
Source File: StreamPlannerFactory.java From flink with Apache License 2.0
@Override
public Planner create(
        Map<String, String> properties,
        Executor executor,
        TableConfig tableConfig,
        FunctionCatalog functionCatalog,
        CatalogManager catalogManager) {
    return new StreamPlanner(executor, tableConfig, functionCatalog, catalogManager);
}
Example #11
Source File: TableEnvironmentTest.java From flink with Apache License 2.0
@Test
public void testConnect() {
    final TableEnvironmentMock tableEnv = TableEnvironmentMock.getStreamingInstance();
    tableEnv
        .connect(new ConnectorDescriptorMock(TableSourceFactoryMock.CONNECTOR_TYPE_VALUE, 1, true))
        .withFormat(new FormatDescriptorMock("my_format", 1))
        .withSchema(new Schema()
            .field("my_field_0", "INT")
            .field("my_field_1", "BOOLEAN")
            .field("my_part_1", "BIGINT")
            .field("my_part_2", "STRING"))
        .withPartitionKeys(Arrays.asList("my_part_1", "my_part_2"))
        .inAppendMode()
        .createTemporaryTable("my_table");

    CatalogManager.TableLookupResult lookupResult = tableEnv.catalogManager.getTable(ObjectIdentifier.of(
            EnvironmentSettings.DEFAULT_BUILTIN_CATALOG,
            EnvironmentSettings.DEFAULT_BUILTIN_DATABASE,
            "my_table"))
        .orElseThrow(AssertionError::new);

    assertThat(lookupResult.isTemporary(), equalTo(true));

    CatalogBaseTable catalogBaseTable = lookupResult.getTable();
    assertTrue(catalogBaseTable instanceof CatalogTable);

    CatalogTable table = (CatalogTable) catalogBaseTable;
    assertCatalogTable(table);
    assertCatalogTable(CatalogTableImpl.fromProperties(table.toProperties()));
}
Example #12
Source File: TableEnvironmentMock.java From flink with Apache License 2.0
protected TableEnvironmentMock(
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        TableConfig tableConfig,
        ExecutorMock executor,
        FunctionCatalog functionCatalog,
        PlannerMock planner,
        boolean isStreamingMode) {
    super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode);
    this.catalogManager = catalogManager;
    this.executor = executor;
    this.functionCatalog = functionCatalog;
    this.planner = planner;
}
Example #13
Source File: TableEnvironmentMock.java From flink with Apache License 2.0
private static TableEnvironmentMock getInstance(boolean isStreamingMode) {
    final TableConfig config = createTableConfig();
    final CatalogManager catalogManager = CatalogManagerMocks.createEmptyCatalogManager();
    final ModuleManager moduleManager = new ModuleManager();
    return new TableEnvironmentMock(
        catalogManager,
        moduleManager,
        config,
        createExecutor(),
        createFunctionCatalog(config, catalogManager, moduleManager),
        createPlanner(),
        isStreamingMode);
}
Example #14
Source File: CatalogManagerMocks.java From flink with Apache License 2.0
public static CatalogManager.Builder preparedCatalogManager() {
    return CatalogManager.newBuilder()
        .classLoader(CatalogManagerMocks.class.getClassLoader())
        .config(new Configuration())
        .defaultCatalog(DEFAULT_CATALOG, new GenericInMemoryCatalog(DEFAULT_CATALOG, DEFAULT_DATABASE))
        .executionConfig(new ExecutionConfig());
}
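The createEmptyCatalogManager() helper used throughout the test examples above presumably just finishes this prepared builder; a one-line sketch of that assumption:

// Assumed implementation of CatalogManagerMocks.createEmptyCatalogManager():
public static CatalogManager createEmptyCatalogManager() {
    return preparedCatalogManager().build();
}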
Example #15
Source File: SqlToOperationConverter.java From flink with Apache License 2.0
/** Convert ALTER TABLE statement. */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        UnresolvedIdentifier newUnresolvedIdentifier =
            UnresolvedIdentifier.of(((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableProperties) {
        Optional<CatalogManager.TableLookupResult> optionalCatalogTable =
            catalogManager.getTable(tableIdentifier);
        if (optionalCatalogTable.isPresent() && !optionalCatalogTable.get().isTemporary()) {
            CatalogTable originalCatalogTable = (CatalogTable) optionalCatalogTable.get().getTable();
            Map<String, String> properties = new HashMap<>();
            properties.putAll(originalCatalogTable.getProperties());
            ((SqlAlterTableProperties) sqlAlterTable).getPropertyList().getList().forEach(p ->
                properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
            CatalogTable catalogTable = new CatalogTableImpl(
                originalCatalogTable.getSchema(),
                originalCatalogTable.getPartitionKeys(),
                properties,
                originalCatalogTable.getComment());
            return new AlterTablePropertiesOperation(tableIdentifier, catalogTable);
        } else {
            throw new ValidationException(String.format(
                "Table %s doesn't exist or is a temporary table.", tableIdentifier.toString()));
        }
    } else {
        throw new ValidationException(String.format(
            "[%s] needs to implement", sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
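The two branches above correspond to the two ALTER TABLE forms handled at this point. Hedged examples of statements that would reach them, using the sqlUpdate API of that Flink era; table names and the option key/value are placeholders:

// Routed to AlterTableRenameOperation (SqlAlterTableRename):
tableEnv.sqlUpdate("ALTER TABLE orders RENAME TO orders_archive");
// Routed to AlterTablePropertiesOperation (SqlAlterTableProperties):
tableEnv.sqlUpdate("ALTER TABLE orders SET ('connector.startup-mode' = 'earliest-offset')");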
Example #16
Source File: TestPlannerFactory.java From flink with Apache License 2.0
@Override
public Planner create(
        Map<String, String> properties,
        Executor executor,
        TableConfig tableConfig,
        FunctionCatalog functionCatalog,
        CatalogManager catalogManager) {
    return null;
}
Example #17
Source File: TableEnvironmentImpl.java From flink with Apache License 2.0
protected TableEnvironmentImpl(
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        TableConfig tableConfig,
        Executor executor,
        FunctionCatalog functionCatalog,
        Planner planner,
        boolean isStreamingMode) {
    this.catalogManager = catalogManager;
    this.catalogManager.setCatalogTableSchemaResolver(
        new CatalogTableSchemaResolver(planner.getParser(), isStreamingMode));
    this.moduleManager = moduleManager;
    this.execEnv = executor;
    this.tableConfig = tableConfig;
    this.functionCatalog = functionCatalog;
    this.planner = planner;
    this.parser = planner.getParser();
    this.isStreamingMode = isStreamingMode;
    this.operationTreeBuilder = OperationTreeBuilder.create(
        tableConfig,
        functionCatalog.asLookup(parser::parseIdentifier),
        catalogManager.getDataTypeFactory(),
        path -> {
            try {
                UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
                Optional<CatalogQueryOperation> catalogQueryOperation = scanInternal(unresolvedIdentifier);
                return catalogQueryOperation.map(t -> ApiExpressionUtils.tableRef(path, t));
            } catch (SqlParserException ex) {
                // The TableLookup is used during resolution of expressions and it actually might not be an
                // identifier of a table. It might be a reference to some other object such as column, local
                // reference etc. This method should return empty optional in such cases to fallback for other
                // identifiers resolution.
                return Optional.empty();
            }
        },
        isStreamingMode
    );
}
Example #18
Source File: TableEnvFactory.java From zeppelin with Apache License 2.0
public TableEnvironment createJavaBlinkStreamTableEnvironment(EnvironmentSettings settings) {
    try {
        Map<String, String> executorProperties = settings.toExecutorProperties();
        Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());
        Map<String, String> plannerProperties = settings.toPlannerProperties();
        Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, tblConfig, blinkFunctionCatalog, catalogManager);
        Class clazz = null;
        if (flinkVersion.isFlink110()) {
            clazz = Class.forName(
                "org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
        } else {
            clazz = Class.forName(
                "org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
        }
        Constructor constructor = clazz.getConstructor(
            CatalogManager.class,
            ModuleManager.class,
            FunctionCatalog.class,
            TableConfig.class,
            org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class,
            Planner.class,
            Executor.class,
            boolean.class);
        return (TableEnvironment) constructor.newInstance(
            catalogManager,
            moduleManager,
            blinkFunctionCatalog,
            tblConfig,
            senv.getJavaEnv(),
            planner,
            executor,
            settings.isStreamingMode());
    } catch (Exception e) {
        throw new TableException("Fail to createJavaBlinkStreamTableEnvironment", e);
    }
}
Example #19
Source File: StreamTableEnvironmentImpl.java From flink with Apache License 2.0
public StreamTableEnvironmentImpl(
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        FunctionCatalog functionCatalog,
        TableConfig tableConfig,
        StreamExecutionEnvironment executionEnvironment,
        Planner planner,
        Executor executor,
        boolean isStreamingMode) {
    super(catalogManager, moduleManager, tableConfig, executor, functionCatalog, planner, isStreamingMode);
    this.executionEnvironment = executionEnvironment;
}
Example #20
Source File: BatchTableEnvironment.java From flink with Apache License 2.0
/**
 * Returns a {@link TableEnvironment} for a Java batch {@link ExecutionEnvironment} that works
 * with {@link DataSet}s.
 *
 * <p>A TableEnvironment can be used to:
 * <ul>
 *     <li>convert a {@link DataSet} to a {@link Table}</li>
 *     <li>register a {@link DataSet} in the {@link TableEnvironment}'s catalog</li>
 *     <li>register a {@link Table} in the {@link TableEnvironment}'s catalog</li>
 *     <li>scan a registered table to obtain a {@link Table}</li>
 *     <li>specify a SQL query on registered tables to obtain a {@link Table}</li>
 *     <li>convert a {@link Table} into a {@link DataSet}</li>
 *     <li>explain the AST and execution plan of a {@link Table}</li>
 * </ul>
 *
 * @param executionEnvironment The Java batch {@link ExecutionEnvironment} of the TableEnvironment.
 * @param tableConfig The configuration of the TableEnvironment.
 */
static BatchTableEnvironment create(ExecutionEnvironment executionEnvironment, TableConfig tableConfig) {
    try {
        // temporary solution until FLINK-15635 is fixed
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        ModuleManager moduleManager = new ModuleManager();
        String defaultCatalog = "default_catalog";
        CatalogManager catalogManager = CatalogManager.newBuilder()
            .classLoader(classLoader)
            .config(tableConfig.getConfiguration())
            .defaultCatalog(
                defaultCatalog,
                new GenericInMemoryCatalog(defaultCatalog, "default_database"))
            .executionConfig(executionEnvironment.getConfig())
            .build();
        Class<?> clazz = Class.forName("org.apache.flink.table.api.bridge.java.internal.BatchTableEnvironmentImpl");
        Constructor<?> con = clazz.getConstructor(
            ExecutionEnvironment.class,
            TableConfig.class,
            CatalogManager.class,
            ModuleManager.class);
        return (BatchTableEnvironment) con.newInstance(executionEnvironment, tableConfig, catalogManager, moduleManager);
    } catch (Throwable t) {
        throw new TableException("Create BatchTableEnvironment failed.", t);
    }
}
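A hypothetical call site for the factory above, combining it with the ExecutionEnvironment lookup seen in Example #29:

// Hypothetical usage: create a BatchTableEnvironment for a DataSet program.
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, new TableConfig());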
Example #21
Source File: DatabaseCalciteSchema.java From flink with Apache License 2.0
public DatabaseCalciteSchema(
        String databaseName,
        String catalogName,
        CatalogManager catalog,
        boolean isStreamingMode) {
    this.databaseName = databaseName;
    this.catalogName = catalogName;
    this.catalogManager = catalog;
    this.isStreamingMode = isStreamingMode;
}
Example #22
Source File: CatalogCalciteSchema.java From flink with Apache License 2.0
public CatalogCalciteSchema(
        String catalogName,
        CatalogManager catalog,
        boolean isStreamingMode) {
    this.catalogName = catalogName;
    this.catalogManager = catalog;
    this.isStreamingMode = isStreamingMode;
}
Example #23
Source File: SqlCreateTableConverter.java From flink with Apache License 2.0
SqlCreateTableConverter(
        FlinkCalciteSqlValidator sqlValidator,
        CatalogManager catalogManager,
        Function<SqlNode, String> escapeExpression,
        Consumer<SqlTableConstraint> validateTableConstraint) {
    this.mergeTableLikeUtil = new MergeTableLikeUtil(
        sqlValidator,
        escapeExpression);
    this.catalogManager = catalogManager;
    this.validateTableConstraint = validateTableConstraint;
}
Example #24
Source File: SqlToOperationConverter.java From flink with Apache License 2.0
private SqlToOperationConverter(
        FlinkPlannerImpl flinkPlanner,
        CatalogManager catalogManager) {
    this.flinkPlanner = flinkPlanner;
    this.catalogManager = catalogManager;
    this.createTableConverter = new SqlCreateTableConverter(
        flinkPlanner.getOrCreateSqlValidator(),
        catalogManager,
        this::getQuotedSqlString,
        this::validateTableConstraint);
}
Example #25
Source File: BlinkPlannerFactory.java From flink with Apache License 2.0
@Override
public Planner create(
        Map<String, String> properties,
        Executor executor,
        TableConfig tableConfig,
        FunctionCatalog functionCatalog,
        CatalogManager catalogManager) {
    if (Boolean.valueOf(properties.getOrDefault(EnvironmentSettings.STREAMING_MODE, "true"))) {
        return new StreamPlanner(executor, tableConfig, functionCatalog, catalogManager);
    } else {
        return new BatchPlanner(executor, tableConfig, functionCatalog, catalogManager);
    }
}
Example #26
Source File: PlannerContext.java From flink with Apache License 2.0
public PlannerContext(
        TableConfig tableConfig,
        FunctionCatalog functionCatalog,
        CatalogManager catalogManager,
        CalciteSchema rootSchema,
        List<RelTraitDef> traitDefs) {
    this.tableConfig = tableConfig;
    this.context = new FlinkContextImpl(
        tableConfig,
        functionCatalog,
        catalogManager,
        this::createSqlExprToRexConverter);
    this.rootSchema = rootSchema;
    this.traitDefs = traitDefs;
    // Make a framework config to initialize the RelOptCluster instance;
    // note that we can only use attributes that cannot be overwritten/configured
    // by the user.
    this.frameworkConfig = createFrameworkConfig();
    RelOptPlanner planner = new VolcanoPlanner(frameworkConfig.getCostFactory(), frameworkConfig.getContext());
    planner.setExecutor(frameworkConfig.getExecutor());
    for (RelTraitDef traitDef : frameworkConfig.getTraitDefs()) {
        planner.addRelTraitDef(traitDef);
    }
    this.cluster = FlinkRelOptClusterFactory.create(planner, new RexBuilder(typeFactory));
}
Example #27
Source File: ParserImpl.java From flink with Apache License 2.0
public ParserImpl(
        CatalogManager catalogManager,
        Supplier<FlinkPlannerImpl> validatorSupplier,
        Supplier<CalciteParser> calciteParserSupplier,
        Function<TableSchema, SqlExprToRexConverter> sqlExprToRexConverterCreator) {
    this.catalogManager = catalogManager;
    this.validatorSupplier = validatorSupplier;
    this.calciteParserSupplier = calciteParserSupplier;
    this.sqlExprToRexConverterCreator = sqlExprToRexConverterCreator;
}
Example #28
Source File: PlannerMocks.java From flink with Apache License 2.0
public static FlinkPlannerImpl createDefaultPlanner() {
    final boolean isStreamingMode = false;
    TableConfig tableConfig = new TableConfig();
    CatalogManager catalogManager = CatalogManagerMocks.createEmptyCatalogManager();
    ModuleManager moduleManager = new ModuleManager();
    FunctionCatalog functionCatalog = new FunctionCatalog(
        tableConfig,
        catalogManager,
        moduleManager);
    PlannerContext plannerContext = new PlannerContext(
        tableConfig,
        functionCatalog,
        catalogManager,
        asRootSchema(new CatalogManagerCalciteSchema(catalogManager, isStreamingMode)),
        new ArrayList<>());
    FlinkPlannerImpl planner = plannerContext.createFlinkPlanner(
        catalogManager.getCurrentCatalog(),
        catalogManager.getCurrentDatabase());
    Parser parser = new ParserImpl(
        catalogManager,
        () -> planner,
        planner::parser,
        t -> plannerContext.createSqlExprToRexConverter(
            plannerContext.getTypeFactory().buildRelNodeRowType(t))
    );
    catalogManager.setCatalogTableSchemaResolver(new CatalogTableSchemaResolver(parser, isStreamingMode));
    return planner;
}
Example #29
Source File: ExecutionContext.java From flink with Apache License 2.0
private void createTableEnvironment(
        EnvironmentSettings settings,
        TableConfig config,
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        FunctionCatalog functionCatalog) {
    if (environment.getExecution().isStreamingPlanner()) {
        streamExecEnv = createStreamExecutionEnvironment();
        execEnv = null;
        final Map<String, String> executorProperties = settings.toExecutorProperties();
        executor = lookupExecutor(executorProperties, streamExecEnv);
        tableEnv = createStreamTableEnvironment(
            streamExecEnv,
            settings,
            config,
            executor,
            catalogManager,
            moduleManager,
            functionCatalog);
    } else if (environment.getExecution().isBatchPlanner()) {
        streamExecEnv = null;
        execEnv = ExecutionEnvironment.getExecutionEnvironment();
        executor = null;
        tableEnv = new BatchTableEnvironmentImpl(
            execEnv,
            config,
            catalogManager,
            moduleManager);
    } else {
        throw new SqlExecutionException("Unsupported execution type specified.");
    }
}
Example #30
Source File: ExecutionContext.java From flink with Apache License 2.0
private SessionState(
        CatalogManager catalogManager,
        ModuleManager moduleManager,
        FunctionCatalog functionCatalog) {
    this.catalogManager = catalogManager;
    this.moduleManager = moduleManager;
    this.functionCatalog = functionCatalog;
}