Java Code Examples for org.apache.hadoop.mapreduce.v2.util.MRApps#setEnvFromInputString()
The following examples show how to use
org.apache.hadoop.mapreduce.v2.util.MRApps#setEnvFromInputString().
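Before the full examples, here is a minimal sketch of the call itself. It assumes the three-argument overload that takes the target environment map, a comma-separated NAME=VALUE string, and a Configuration; the variable names and paths are made up for illustration.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class SetEnvFromInputStringSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    Map<String, String> env = new HashMap<String, String>();

    // Hypothetical input: comma-separated NAME=VALUE pairs. A $VAR reference
    // is resolved from the map being built or, failing that, from System.getenv().
    MRApps.setEnvFromInputString(env,
        "JAVA_LIBRARY_PATH=/opt/native,LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/native",
        conf);

    System.out.println(env);
  }
}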
Example 1
Source File: MapReduceChildJVM.java (from the hadoop project, Apache License 2.0)
public static void setVMEnv(Map<String, String> environment, Task task) {

  JobConf conf = task.conf;

  // Add the env variables passed by the user
  String mapredChildEnv = getChildEnv(conf, task.isMapTask());
  MRApps.setEnvFromInputString(environment, mapredChildEnv, conf);

  // Set logging level in the environment.
  // This is so that, if the child forks another "bin/hadoop" (common in
  // streaming) it will have the correct loglevel.
  environment.put(
      "HADOOP_ROOT_LOGGER",
      MRApps.getChildLogLevel(conf, task.isMapTask()) + ",console");

  // TODO: The following is useful for instance in streaming tasks. Should be
  // set in ApplicationMaster's env by the RM.
  String hadoopClientOpts = System.getenv("HADOOP_CLIENT_OPTS");
  if (hadoopClientOpts == null) {
    hadoopClientOpts = "";
  } else {
    hadoopClientOpts = hadoopClientOpts + " ";
  }
  environment.put("HADOOP_CLIENT_OPTS", hadoopClientOpts);

  // setEnvFromInputString above will add env variable values from
  // mapredChildEnv to existing variables. We want to overwrite
  // HADOOP_ROOT_LOGGER and HADOOP_CLIENT_OPTS if the user set it explicitly.
  Map<String, String> tmpEnv = new HashMap<String, String>();
  MRApps.setEnvFromInputString(tmpEnv, mapredChildEnv, conf);
  String[] keys = { "HADOOP_ROOT_LOGGER", "HADOOP_CLIENT_OPTS" };
  for (String key : keys) {
    if (tmpEnv.containsKey(key)) {
      environment.put(key, tmpEnv.get(key));
    }
  }

  // Add stdout/stderr env
  environment.put(
      MRJobConfig.STDOUT_LOGFILE_ENV,
      getTaskLogFile(TaskLog.LogName.STDOUT)
      );
  environment.put(
      MRJobConfig.STDERR_LOGFILE_ENV,
      getTaskLogFile(TaskLog.LogName.STDERR)
      );
}
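The second pass through tmpEnv in this example reflects that setEnvFromInputString appends to a variable that already exists in the map (joining the old and new values, by default with the platform path separator) rather than overwriting it. The fragment below sketches that behavior with made-up values, reusing the imports from the sketch above.

Configuration conf = new Configuration();
Map<String, String> env = new HashMap<String, String>();
env.put("HADOOP_ROOT_LOGGER", "INFO,console");  // value set earlier by the framework

// "DEBUG" is appended to the existing value instead of replacing it,
// leaving something like "INFO,console:DEBUG" on Linux.
MRApps.setEnvFromInputString(env, "HADOOP_ROOT_LOGGER=DEBUG", conf);

// Parsing into a fresh map and copying the key afterwards, as the example
// does with tmpEnv, is what lets a user-supplied value win outright.
Map<String, String> tmpEnv = new HashMap<String, String>();
MRApps.setEnvFromInputString(tmpEnv, "HADOOP_ROOT_LOGGER=DEBUG", conf);
env.put("HADOOP_ROOT_LOGGER", tmpEnv.get("HADOOP_ROOT_LOGGER"));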
Example 2
Source File: MapReduceChildJVM.java (from the big-c project, Apache License 2.0)
public static void setVMEnv(Map<String, String> environment, Task task) {

  JobConf conf = task.conf;

  // Add the env variables passed by the user
  String mapredChildEnv = getChildEnv(conf, task.isMapTask());
  MRApps.setEnvFromInputString(environment, mapredChildEnv, conf);

  // Set logging level in the environment.
  // This is so that, if the child forks another "bin/hadoop" (common in
  // streaming) it will have the correct loglevel.
  environment.put(
      "HADOOP_ROOT_LOGGER",
      MRApps.getChildLogLevel(conf, task.isMapTask()) + ",console");

  // TODO: The following is useful for instance in streaming tasks. Should be
  // set in ApplicationMaster's env by the RM.
  String hadoopClientOpts = System.getenv("HADOOP_CLIENT_OPTS");
  if (hadoopClientOpts == null) {
    hadoopClientOpts = "";
  } else {
    hadoopClientOpts = hadoopClientOpts + " ";
  }
  environment.put("HADOOP_CLIENT_OPTS", hadoopClientOpts);

  // setEnvFromInputString above will add env variable values from
  // mapredChildEnv to existing variables. We want to overwrite
  // HADOOP_ROOT_LOGGER and HADOOP_CLIENT_OPTS if the user set it explicitly.
  Map<String, String> tmpEnv = new HashMap<String, String>();
  MRApps.setEnvFromInputString(tmpEnv, mapredChildEnv, conf);
  String[] keys = { "HADOOP_ROOT_LOGGER", "HADOOP_CLIENT_OPTS" };
  for (String key : keys) {
    if (tmpEnv.containsKey(key)) {
      environment.put(key, tmpEnv.get(key));
    }
  }

  // Add stdout/stderr env
  environment.put(
      MRJobConfig.STDOUT_LOGFILE_ENV,
      getTaskLogFile(TaskLog.LogName.STDOUT)
      );
  environment.put(
      MRJobConfig.STDERR_LOGFILE_ENV,
      getTaskLogFile(TaskLog.LogName.STDERR)
      );
}