org.apache.flink.configuration.GlobalConfiguration Java Examples
The following examples show how to use org.apache.flink.configuration.GlobalConfiguration. Each example lists the source file it was taken from, the project it comes from, and that project's license.
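Before the project examples, here is a minimal sketch of the two loadConfiguration entry points that appear most often below. It is a sketch only: the configuration directory path is a placeholder, and the claim that the no-argument overload resolves the directory from the FLINK_CONF_DIR environment variable is an assumption about the library, not something shown in the examples themselves.

// Minimal sketch; the directory path is a placeholder.
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.JobManagerOptions;

public class GlobalConfigurationSketch {
    public static void main(String[] args) {
        // Reads flink-conf.yaml from the directory given by the FLINK_CONF_DIR environment
        // variable (assumed behavior of the no-argument overload used in the examples below).
        Configuration fromEnv = GlobalConfiguration.loadConfiguration();

        // Reads flink-conf.yaml from an explicit configuration directory instead.
        Configuration fromDir = GlobalConfiguration.loadConfiguration("/opt/flink/conf");

        // Typed read with a fallback value.
        String jobManagerAddress = fromDir.getString(JobManagerOptions.ADDRESS, "localhost");
        System.out.println("jobmanager.rpc.address = " + jobManagerAddress);
    }
}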
Example #1
Source File: TaskManagerRunnerConfigurationTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testLoadDynamicalProperties() throws IOException, FlinkParseException {
    final File tmpDir = temporaryFolder.newFolder();
    final File confFile = new File(tmpDir, GlobalConfiguration.FLINK_CONF_FILENAME);
    final PrintWriter pw1 = new PrintWriter(confFile);
    final long managedMemory = 1024 * 1024 * 256;
    pw1.println(JobManagerOptions.ADDRESS.key() + ": localhost");
    pw1.println(TaskManagerOptions.MANAGED_MEMORY_SIZE.key() + ": " + managedMemory + "b");
    pw1.close();

    final String jmHost = "host1";
    final int jmPort = 12345;

    String[] args = new String[] {
        "--configDir", tmpDir.toString(),
        "-D" + JobManagerOptions.ADDRESS.key() + "=" + jmHost,
        "-D" + JobManagerOptions.PORT.key() + "=" + jmPort
    };
    Configuration configuration = TaskManagerRunner.loadConfiguration(args);
    assertEquals(MemorySize.parse(managedMemory + "b"), configuration.get(TaskManagerOptions.MANAGED_MEMORY_SIZE));
    assertEquals(jmHost, configuration.get(JobManagerOptions.ADDRESS));
    assertEquals(jmPort, configuration.getInteger(JobManagerOptions.PORT));
}
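The assertions above show that -D dynamic properties are layered on top of the generated flink-conf.yaml and take precedence over it (host1 replaces the localhost entry written to the file). As a sketch of the same layering done directly against GlobalConfiguration, with a placeholder configuration directory:

// Sketch: layering dynamic properties over flink-conf.yaml, as the test above exercises
// through TaskManagerRunner.loadConfiguration(args). The directory is a placeholder.
Configuration dynamicProperties = new Configuration();
dynamicProperties.setString(JobManagerOptions.ADDRESS, "host1");
dynamicProperties.setInteger(JobManagerOptions.PORT, 12345);

// Dynamic properties win over values read from the file, which is what the
// assertion on jmHost verifies above.
Configuration configuration =
    GlobalConfiguration.loadConfiguration("/opt/flink/conf", dynamicProperties);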
Example #2
Source File: ClusterConfigurationInfo.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public static ClusterConfigurationInfo from(Configuration config) {
    ClusterConfigurationInfo clusterConfig = new ClusterConfigurationInfo(config.keySet().size());

    for (String key : config.keySet()) {
        String value = config.getString(key, null);

        // Mask key values which contain sensitive information
        if (value != null && GlobalConfiguration.isSensitive(key)) {
            value = GlobalConfiguration.HIDDEN_CONTENT;
        }

        clusterConfig.add(new ClusterConfigurationInfoEntry(key, value));
    }

    return clusterConfig;
}
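The same masking helpers can be reused anywhere configuration values are exposed. A short sketch that dumps a loaded configuration without leaking secrets; isSensitive typically matches keys containing substrings such as "password" or "secret", though the exact list depends on the Flink version.

// Sketch: print a Configuration without leaking sensitive values, reusing the
// masking helpers shown above.
Configuration config = GlobalConfiguration.loadConfiguration();
for (String key : config.keySet()) {
    String value = config.getString(key, null);
    if (value != null && GlobalConfiguration.isSensitive(key)) {
        value = GlobalConfiguration.HIDDEN_CONTENT;
    }
    System.out.println(key + ": " + value);
}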
Example #3
Source File: TaskManagerRunner.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@VisibleForTesting
static Configuration loadConfiguration(String[] args) throws FlinkParseException {
    final CommandLineParser<ClusterConfiguration> commandLineParser = new CommandLineParser<>(new ClusterConfigurationParserFactory());

    final ClusterConfiguration clusterConfiguration;

    try {
        clusterConfiguration = commandLineParser.parse(args);
    } catch (FlinkParseException e) {
        LOG.error("Could not parse the command line options.", e);
        commandLineParser.printHelp(TaskManagerRunner.class.getSimpleName());
        throw e;
    }

    final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(clusterConfiguration.getDynamicProperties());

    return GlobalConfiguration.loadConfiguration(clusterConfiguration.getConfigDir(), dynamicProperties);
}
Example #4
Source File: ClusterEntrypoint.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
protected static Configuration loadConfiguration(EntrypointClusterConfiguration entrypointClusterConfiguration) {
    final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(entrypointClusterConfiguration.getDynamicProperties());
    final Configuration configuration = GlobalConfiguration.loadConfiguration(entrypointClusterConfiguration.getConfigDir(), dynamicProperties);

    final int restPort = entrypointClusterConfiguration.getRestPort();

    if (restPort >= 0) {
        configuration.setInteger(RestOptions.PORT, restPort);
    }

    final String hostname = entrypointClusterConfiguration.getHostname();

    if (hostname != null) {
        configuration.setString(JobManagerOptions.ADDRESS, hostname);
    }

    return configuration;
}
Example #5
Source File: FlinkYarnSessionCli.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public static void main(final String[] args) {
    final String configurationDirectory = CliFrontend.getConfigurationDirectoryFromEnv();

    final Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

    int retCode;

    try {
        final FlinkYarnSessionCli cli = new FlinkYarnSessionCli(
            flinkConfiguration,
            configurationDirectory,
            "",
            ""); // no prefix for the YARN session

        SecurityUtils.install(new SecurityConfiguration(flinkConfiguration));
        retCode = SecurityUtils.getInstalledContext().runSecured(() -> cli.run(args));
    } catch (CliArgsException e) {
        retCode = handleCliArgsException(e);
    } catch (Throwable t) {
        final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
        retCode = handleError(strippedThrowable);
    }

    System.exit(retCode);
}
Example #6
Source File: TaskManagerRunnerConfigurationTest.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Test
public void testDefaultFsParameterLoading() throws Exception {
    try {
        final File tmpDir = temporaryFolder.newFolder();
        final File confFile = new File(tmpDir, GlobalConfiguration.FLINK_CONF_FILENAME);

        final URI defaultFS = new URI("otherFS", null, "localhost", 1234, null, null, null);

        final PrintWriter pw1 = new PrintWriter(confFile);
        pw1.println("fs.default-scheme: " + defaultFS);
        pw1.close();

        String[] args = new String[] {"--configDir", tmpDir.toString()};
        Configuration configuration = TaskManagerRunner.loadConfiguration(args);
        FileSystem.initialize(configuration);

        assertEquals(defaultFS, FileSystem.getDefaultFsUri());
    } finally {
        // reset FS settings
        FileSystem.initialize(new Configuration());
    }
}
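The test writes fs.default-scheme into a generated flink-conf.yaml and loads it through TaskManagerRunner. The same key can also be set programmatically before initializing the FileSystem; a sketch with a placeholder HDFS address:

// Sketch: set the default filesystem scheme directly instead of writing it to flink-conf.yaml.
// The HDFS address is a placeholder.
Configuration configuration = GlobalConfiguration.loadConfiguration();
configuration.setString("fs.default-scheme", "hdfs://namenode:8020/");
FileSystem.initialize(configuration);
// Paths without an explicit scheme are now resolved against hdfs://namenode:8020/.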
Example #7
Source File: HadoopUtils.java From flink with Apache License 2.0 | 6 votes |
/**
 * Merge HadoopConfiguration into Configuration. This is necessary for the HDFS configuration.
 */
public static void mergeHadoopConf(Configuration hadoopConfig) {
    // we have to load the global configuration here, because the HadoopInputFormatBase does not
    // have access to a Flink configuration object
    org.apache.flink.configuration.Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

    Configuration hadoopConf =
        org.apache.flink.api.java.hadoop.mapred.utils.HadoopUtils.getHadoopConfiguration(flinkConfiguration);

    for (Map.Entry<String, String> e : hadoopConf) {
        if (hadoopConfig.get(e.getKey()) == null) {
            hadoopConfig.set(e.getKey(), e.getValue());
        }
    }
}
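The loop above copies only the Hadoop keys that the target configuration does not define yet. The same fill-in-missing-keys idea, sketched for two Flink Configuration objects using only the accessors that appear in the other examples; the method name is illustrative and not part of any Flink API.

// Sketch: copy defaults into a target Configuration without overwriting existing keys.
// mergeMissingKeys is an illustrative helper, not a Flink API.
static void mergeMissingKeys(Configuration target, Configuration defaults) {
    for (String key : defaults.keySet()) {
        String value = defaults.getString(key, null);
        if (value != null && target.getString(key, null) == null) {
            target.setString(key, value);
        }
    }
}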
Example #8
Source File: FlinkYarnSessionCli.java From flink with Apache License 2.0 | 6 votes |
public static void main(final String[] args) {
    final String configurationDirectory = CliFrontend.getConfigurationDirectoryFromEnv();

    final Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

    int retCode;

    try {
        final FlinkYarnSessionCli cli = new FlinkYarnSessionCli(
            flinkConfiguration,
            configurationDirectory,
            "",
            ""); // no prefix for the YARN session

        SecurityUtils.install(new SecurityConfiguration(flinkConfiguration));
        retCode = SecurityUtils.getInstalledContext().runSecured(() -> cli.run(args));
    } catch (CliArgsException e) {
        retCode = handleCliArgsException(e);
    } catch (Throwable t) {
        final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
        retCode = handleError(strippedThrowable);
    }

    System.exit(retCode);
}
Example #9
Source File: TaskManagerRunner.java From flink with Apache License 2.0 | 6 votes |
@VisibleForTesting
static Configuration loadConfiguration(String[] args) throws FlinkParseException {
    final CommandLineParser<ClusterConfiguration> commandLineParser = new CommandLineParser<>(new ClusterConfigurationParserFactory());

    final ClusterConfiguration clusterConfiguration;

    try {
        clusterConfiguration = commandLineParser.parse(args);
    } catch (FlinkParseException e) {
        LOG.error("Could not parse the command line options.", e);
        commandLineParser.printHelp(TaskManagerRunner.class.getSimpleName());
        throw e;
    }

    final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(clusterConfiguration.getDynamicProperties());

    return GlobalConfiguration.loadConfiguration(clusterConfiguration.getConfigDir(), dynamicProperties);
}
Example #10
Source File: ClusterConfigurationInfo.java From flink with Apache License 2.0 | 6 votes |
public static ClusterConfigurationInfo from(Configuration config) {
    ClusterConfigurationInfo clusterConfig = new ClusterConfigurationInfo(config.keySet().size());

    for (String key : config.keySet()) {
        String value = config.getString(key, null);

        // Mask key values which contain sensitive information
        if (value != null && GlobalConfiguration.isSensitive(key)) {
            value = GlobalConfiguration.HIDDEN_CONTENT;
        }

        clusterConfig.add(new ClusterConfigurationInfoEntry(key, value));
    }

    return clusterConfig;
}
Example #11
Source File: ClusterEntrypoint.java From flink with Apache License 2.0 | 6 votes |
protected static Configuration loadConfiguration(EntrypointClusterConfiguration entrypointClusterConfiguration) {
    final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(entrypointClusterConfiguration.getDynamicProperties());
    final Configuration configuration = GlobalConfiguration.loadConfiguration(entrypointClusterConfiguration.getConfigDir(), dynamicProperties);

    final int restPort = entrypointClusterConfiguration.getRestPort();

    if (restPort >= 0) {
        configuration.setInteger(RestOptions.PORT, restPort);
    }

    final String hostname = entrypointClusterConfiguration.getHostname();

    if (hostname != null) {
        configuration.setString(JobManagerOptions.ADDRESS, hostname);
    }

    return configuration;
}
Example #12
Source File: FlinkPlanner.java From cascading-flink with Apache License 2.0 | 6 votes |
public FlinkPlanner(List<String> classPath) {
    super();
    this.classPath = classPath;

    env.getConfig().disableSysoutLogging();

    if (env.getParallelism() <= 0) {
        // load the default parallelism from config
        GlobalConfiguration.loadConfiguration(new File(CliFrontend.getConfigurationDirectoryFromEnv()).getAbsolutePath());
        org.apache.flink.configuration.Configuration configuration = GlobalConfiguration.getConfiguration();
        int parallelism = configuration.getInteger(ConfigConstants.DEFAULT_PARALLELISM_KEY, -1);
        if (parallelism <= 0) {
            throw new RuntimeException("Please set the default parallelism via the -p command-line flag");
        } else {
            env.setParallelism(parallelism);
        }
    }
}
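This example still goes through the static GlobalConfiguration.getConfiguration() and the legacy ConfigConstants.DEFAULT_PARALLELISM_KEY. Newer code, such as the StreamPlanEnvironment example further down (Example #28), reads the value from the Configuration returned by loadConfiguration() via CoreOptions.DEFAULT_PARALLELISM. A sketch of that variant:

// Sketch of the newer pattern (see Example #28): read the default parallelism from the
// returned Configuration instead of the static GlobalConfiguration.getConfiguration().
Configuration configuration = GlobalConfiguration.loadConfiguration(
    new File(CliFrontend.getConfigurationDirectoryFromEnv()).getAbsolutePath());
int parallelism = configuration.getInteger(CoreOptions.DEFAULT_PARALLELISM);
// parallelism can then be passed to env.setParallelism(parallelism) as in the constructor above.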
Example #13
Source File: HadoopUtils.java From flink with Apache License 2.0 | 6 votes |
/**
 * Merge HadoopConfiguration into Configuration. This is necessary for the HDFS configuration.
 */
public static void mergeHadoopConf(Configuration hadoopConfig) {
    // we have to load the global configuration here, because the HadoopInputFormatBase does not
    // have access to a Flink configuration object
    org.apache.flink.configuration.Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

    Configuration hadoopConf =
        org.apache.flink.api.java.hadoop.mapred.utils.HadoopUtils.getHadoopConfiguration(flinkConfiguration);

    for (Map.Entry<String, String> e : hadoopConf) {
        if (hadoopConfig.get(e.getKey()) == null) {
            hadoopConfig.set(e.getKey(), e.getValue());
        }
    }
}
Example #14
Source File: KubernetesEntrypointUtils.java From flink with Apache License 2.0 | 6 votes |
/**
 * For a non-HA cluster, {@link JobManagerOptions#ADDRESS} has to be set to the Kubernetes service name on the
 * client side (see {@link KubernetesClusterDescriptor#deployClusterInternal}), so the TaskManager will use the
 * service address to contact the JobManager.
 * For an HA cluster, {@link JobManagerOptions#ADDRESS} will be set to the pod IP address; the TaskManager uses
 * ZooKeeper or another high-availability service to find the address of the JobManager.
 *
 * @return Updated configuration
 */
static Configuration loadConfiguration() {
    final String configDir = System.getenv(ConfigConstants.ENV_FLINK_CONF_DIR);
    Preconditions.checkNotNull(
        configDir,
        "Flink configuration directory (%s) in environment should not be null!",
        ConfigConstants.ENV_FLINK_CONF_DIR);

    final Configuration configuration = GlobalConfiguration.loadConfiguration(configDir);

    if (HighAvailabilityMode.isHighAvailabilityModeActivated(configuration)) {
        final String ipAddress = System.getenv().get(Constants.ENV_FLINK_POD_IP_ADDRESS);
        Preconditions.checkState(
            ipAddress != null,
            "JobManager ip address environment variable %s not set",
            Constants.ENV_FLINK_POD_IP_ADDRESS);
        configuration.setString(JobManagerOptions.ADDRESS, ipAddress);
        configuration.setString(RestOptions.ADDRESS, ipAddress);
    }

    return configuration;
}
Example #15
Source File: FlinkYarnSessionCli.java From flink with Apache License 2.0 | 6 votes |
private String encodeDynamicProperties(final CommandLine cmd) {
    final Properties properties = cmd.getOptionProperties(dynamicproperties.getOpt());

    final String[] dynamicProperties = properties.stringPropertyNames().stream()
        .flatMap(
            (String key) -> {
                final String value = properties.getProperty(key);

                LOG.info("Dynamic Property set: {}={}", key, GlobalConfiguration.isSensitive(key) ? GlobalConfiguration.HIDDEN_CONTENT : value);

                if (value != null) {
                    return Stream.of(key + dynamicproperties.getValueSeparator() + value);
                } else {
                    return Stream.empty();
                }
            })
        .toArray(String[]::new);

    return StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);
}
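encodeDynamicProperties serializes the parsed -D options into key=value strings, masking sensitive values in the log. On the receiving side, Examples #3 and #19 turn such dynamic properties back into a Configuration with ConfigurationUtils.createConfiguration. A sketch of that direction, with illustrative values:

// Sketch: turn received dynamic properties back into a Configuration, as the
// TaskManagerRunner and ConfigurationParserUtils examples do. Values are illustrative.
Properties dynamicProperties = new Properties();
dynamicProperties.setProperty("jobmanager.rpc.address", "host1");
dynamicProperties.setProperty("rest.port", "8081");

Configuration configuration = ConfigurationUtils.createConfiguration(dynamicProperties);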
Example #16
Source File: TaskManagerRunnerConfigurationTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testDefaultFsParameterLoading() throws Exception {
    try {
        final File tmpDir = temporaryFolder.newFolder();
        final File confFile = new File(tmpDir, GlobalConfiguration.FLINK_CONF_FILENAME);

        final URI defaultFS = new URI("otherFS", null, "localhost", 1234, null, null, null);

        final PrintWriter pw1 = new PrintWriter(confFile);
        pw1.println("fs.default-scheme: " + defaultFS);
        pw1.close();

        String[] args = new String[] {"--configDir", tmpDir.toString()};
        Configuration configuration = TaskManagerRunner.loadConfiguration(args);
        FileSystem.initialize(configuration);

        assertEquals(defaultFS, FileSystem.getDefaultFsUri());
    } finally {
        // reset FS settings
        FileSystem.initialize(new Configuration());
    }
}
Example #17
Source File: FlinkYarnSessionCli.java From flink with Apache License 2.0 | 6 votes |
public static void main(final String[] args) {
    final String configurationDirectory = CliFrontend.getConfigurationDirectoryFromEnv();

    final Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

    int retCode;

    try {
        final FlinkYarnSessionCli cli = new FlinkYarnSessionCli(
            flinkConfiguration,
            configurationDirectory,
            "",
            ""); // no prefix for the YARN session

        SecurityUtils.install(new SecurityConfiguration(flinkConfiguration));
        retCode = SecurityUtils.getInstalledContext().runSecured(() -> cli.run(args));
    } catch (CliArgsException e) {
        retCode = handleCliArgsException(e, LOG);
    } catch (Throwable t) {
        final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
        retCode = handleError(strippedThrowable, LOG);
    }

    System.exit(retCode);
}
Example #18
Source File: ClusterEntrypoint.java From flink with Apache License 2.0 | 6 votes |
protected static Configuration loadConfiguration(EntrypointClusterConfiguration entrypointClusterConfiguration) {
    final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(entrypointClusterConfiguration.getDynamicProperties());
    final Configuration configuration = GlobalConfiguration.loadConfiguration(entrypointClusterConfiguration.getConfigDir(), dynamicProperties);

    final int restPort = entrypointClusterConfiguration.getRestPort();

    if (restPort >= 0) {
        configuration.setInteger(RestOptions.PORT, restPort);
    }

    final String hostname = entrypointClusterConfiguration.getHostname();

    if (hostname != null) {
        configuration.setString(JobManagerOptions.ADDRESS, hostname);
    }

    return configuration;
}
Example #19
Source File: ConfigurationParserUtils.java From flink with Apache License 2.0 | 6 votes |
/**
 * Generate configuration from only the config file and dynamic properties.
 *
 * @param args the commandline arguments
 * @param cmdLineSyntax the syntax for this application
 * @return generated configuration
 * @throws FlinkParseException if the configuration cannot be generated
 */
public static Configuration loadCommonConfiguration(String[] args, String cmdLineSyntax) throws FlinkParseException {
    final CommandLineParser<ClusterConfiguration> commandLineParser = new CommandLineParser<>(new ClusterConfigurationParserFactory());

    final ClusterConfiguration clusterConfiguration;

    try {
        clusterConfiguration = commandLineParser.parse(args);
    } catch (FlinkParseException e) {
        LOG.error("Could not parse the command line options.", e);
        commandLineParser.printHelp(cmdLineSyntax);
        throw e;
    }

    final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(clusterConfiguration.getDynamicProperties());
    return GlobalConfiguration.loadConfiguration(clusterConfiguration.getConfigDir(), dynamicProperties);
}
Example #20
Source File: TaskManagerRunnerConfigurationTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testDefaultFsParameterLoading() throws Exception {
    try {
        final File tmpDir = temporaryFolder.newFolder();
        final File confFile = new File(tmpDir, GlobalConfiguration.FLINK_CONF_FILENAME);

        final URI defaultFS = new URI("otherFS", null, "localhost", 1234, null, null, null);

        final PrintWriter pw1 = new PrintWriter(confFile);
        pw1.println("fs.default-scheme: " + defaultFS);
        pw1.close();

        String[] args = new String[] {"--configDir", tmpDir.toString()};
        Configuration configuration = TaskManagerRunner.loadConfiguration(args);
        FileSystem.initialize(configuration);

        assertEquals(defaultFS, FileSystem.getDefaultFsUri());
    } finally {
        // reset FS settings
        FileSystem.initialize(new Configuration());
    }
}
Example #21
Source File: HadoopUtils.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Merge HadoopConfiguration into Configuration. This is necessary for the HDFS configuration.
 */
public static void mergeHadoopConf(Configuration hadoopConfig) {
    // we have to load the global configuration here, because the HadoopInputFormatBase does not
    // have access to a Flink configuration object
    org.apache.flink.configuration.Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

    Configuration hadoopConf =
        org.apache.flink.api.java.hadoop.mapred.utils.HadoopUtils.getHadoopConfiguration(flinkConfiguration);

    for (Map.Entry<String, String> e : hadoopConf) {
        if (hadoopConfig.get(e.getKey()) == null) {
            hadoopConfig.set(e.getKey(), e.getValue());
        }
    }
}
Example #22
Source File: MesosEntrypointUtils.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Loads the global configuration, adds the given dynamic properties configuration, and sets
 * the temp directory paths.
 *
 * @param dynamicProperties dynamic properties to integrate
 * @param log logger instance
 * @return the loaded and adapted global configuration
 */
public static Configuration loadConfiguration(Configuration dynamicProperties, Logger log) {
    Configuration configuration = GlobalConfiguration.loadConfigurationWithDynamicProperties(dynamicProperties);

    // read the environment variables
    final Map<String, String> envs = System.getenv();
    final String tmpDirs = envs.get(MesosConfigKeys.ENV_FLINK_TMP_DIR);

    BootstrapTools.updateTmpDirectoriesInConfiguration(configuration, tmpDirs);

    return configuration;
}
Example #23
Source File: CliFrontend.java From flink with Apache License 2.0 | 5 votes |
/**
 * Submits the job based on the arguments.
 */
public static void main(final String[] args) {
    EnvironmentInformation.logEnvironmentInfo(LOG, "Command Line Client", args);

    // 1. find the configuration directory
    final String configurationDirectory = getConfigurationDirectoryFromEnv();

    // 2. load the global configuration
    final Configuration configuration = GlobalConfiguration.loadConfiguration(configurationDirectory);

    // 3. load the custom command lines
    final List<CustomCommandLine> customCommandLines = loadCustomCommandLines(
        configuration,
        configurationDirectory);

    try {
        final CliFrontend cli = new CliFrontend(
            configuration,
            customCommandLines);

        SecurityUtils.install(new SecurityConfiguration(cli.configuration));
        int retCode = SecurityUtils.getInstalledContext()
            .runSecured(() -> cli.parseParameters(args));
        System.exit(retCode);
    } catch (Throwable t) {
        final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
        LOG.error("Fatal error while running command line interface.", strippedThrowable);
        strippedThrowable.printStackTrace();
        System.exit(31);
    }
}
Example #24
Source File: PythonStreamBinder.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Entry point for the execution of a python streaming task.
 *
 * @param args pathToScript [pathToPackage1 .. [pathToPackageX]] - [parameter1]..[parameterX]
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    Configuration globalConfig = GlobalConfiguration.loadConfiguration();
    PythonStreamBinder binder = new PythonStreamBinder(globalConfig);
    try {
        binder.runPlan(args);
    } catch (Exception e) {
        System.out.println("Failed to run plan: " + e.getMessage());
        e.printStackTrace();
        LOG.error("Failed to run plan.", e);
    }
}
Example #25
Source File: StandaloneJobClusterConfigurationParserFactoryTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Before
public void createEmptyFlinkConfiguration() throws IOException {
    File confDir = tempFolder.getRoot();
    confDirPath = confDir.getAbsolutePath();
    confFile = new File(confDir, GlobalConfiguration.FLINK_CONF_FILENAME);
    confFile.createNewFile();
}
Example #26
Source File: HadoopUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Merge HadoopConfiguration into JobConf. This is necessary for the HDFS configuration.
 */
public static void mergeHadoopConf(JobConf jobConf) {
    // we have to load the global configuration here, because the HadoopInputFormatBase does not
    // have access to a Flink configuration object
    org.apache.flink.configuration.Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

    Configuration hadoopConf = getHadoopConfiguration(flinkConfiguration);

    for (Map.Entry<String, String> e : hadoopConf) {
        if (jobConf.get(e.getKey()) == null) {
            jobConf.set(e.getKey(), e.getValue());
        }
    }
}
Example #27
Source File: HistoryServer.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
public static void main(String[] args) throws Exception {
    ParameterTool pt = ParameterTool.fromArgs(args);
    String configDir = pt.getRequired("configDir");

    LOG.info("Loading configuration from {}", configDir);
    final Configuration flinkConfig = GlobalConfiguration.loadConfiguration(configDir);

    try {
        FileSystem.initialize(flinkConfig);
    } catch (IOException e) {
        throw new Exception("Error while setting the default filesystem scheme from configuration.", e);
    }

    // run the history server
    SecurityUtils.install(new SecurityConfiguration(flinkConfig));

    try {
        SecurityUtils.getInstalledContext().runSecured(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                HistoryServer hs = new HistoryServer(flinkConfig);
                hs.run();
                return 0;
            }
        });
        System.exit(0);
    } catch (Throwable t) {
        final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
        LOG.error("Failed to run HistoryServer.", strippedThrowable);
        strippedThrowable.printStackTrace();
        System.exit(1);
    }
}
Example #28
Source File: StreamPlanEnvironment.java From flink with Apache License 2.0 | 5 votes |
protected StreamPlanEnvironment(ExecutionEnvironment env) {
    super();
    this.env = env;

    int parallelism = env.getParallelism();
    if (parallelism > 0) {
        setParallelism(parallelism);
    } else {
        // determine parallelism
        setParallelism(GlobalConfiguration.loadConfiguration().getInteger(CoreOptions.DEFAULT_PARALLELISM));
    }
}
Example #29
Source File: FlinkDistribution.java From flink with Apache License 2.0 | 5 votes |
@Override
public void before() throws IOException {
    defaultConfig = new UnmodifiableConfiguration(GlobalConfiguration.loadConfiguration(conf.toAbsolutePath().toString()));
    final Path originalConfig = conf.resolve(FLINK_CONF_YAML);
    final Path backupConfig = conf.resolve(FLINK_CONF_YAML_BACKUP);
    Files.copy(originalConfig, backupConfig);
    filesToDelete.add(new AutoClosablePath(backupConfig));
}
Example #30
Source File: MesosUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Loads the global configuration, adds the given dynamic properties configuration, and sets
 * the temp directory paths.
 *
 * @param dynamicProperties dynamic properties to integrate
 * @param log logger instance
 * @return the loaded and adapted global configuration
 */
public static Configuration loadConfiguration(Configuration dynamicProperties, Logger log) {
    Configuration configuration = GlobalConfiguration.loadConfiguration(dynamicProperties);

    // read the environment variables
    final Map<String, String> envs = System.getenv();
    final String tmpDirs = envs.get(MesosConfigKeys.ENV_FLINK_TMP_DIR);

    BootstrapTools.updateTmpDirectoriesInConfiguration(configuration, tmpDirs);

    return configuration;
}