Java Code Examples for org.apache.hadoop.conf.Configuration#setClassLoader()
The following examples show how to use org.apache.hadoop.conf.Configuration#setClassLoader().
The examples are drawn from several open-source projects; the originating source file, project, and license are noted above each example.
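Before diving into the project examples, here is a minimal, self-contained sketch of the basic pattern (the jar path and plugin class name are hypothetical placeholders, not taken from any project below): a Configuration is handed a URLClassLoader, and later class lookups on that Configuration, such as Configuration#getClassByName, resolve through the supplied loader.

import java.net.URL;
import java.net.URLClassLoader;

import org.apache.hadoop.conf.Configuration;

public class SetClassLoaderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Hypothetical plugin jar; any jar on the local filesystem would do.
    URL pluginJar = new URL("file:///tmp/plugins/my-plugin.jar");
    ClassLoader loader = new URLClassLoader(new URL[] { pluginJar },
        Thread.currentThread().getContextClassLoader());

    // From here on, class names looked up via this Configuration
    // (e.g. Configuration#getClassByName) resolve through `loader`.
    conf.setClassLoader(loader);

    // Hypothetical class assumed to live inside the plugin jar.
    Class<?> c = conf.getClassByName("com.example.MyPlugin");
    System.out.println("Loaded " + c.getName());
  }
}

A recurring production variant of this pattern in the examples below is conf.setClassLoader(Thread.currentThread().getContextClassLoader()), which makes Hadoop resolve user classes through the caller's context classloader rather than the classloader that loaded Hadoop itself.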
Example 1
Source File: TestWrappedRRClassloader.java From hadoop with Apache License 2.0
/**
 * Tests that the class loader set by
 * {@link Configuration#setClassLoader(ClassLoader)}
 * is inherited by any {@link WrappedRecordReader}s created by
 * {@link CompositeRecordReader}.
 */
public void testClassLoader() throws Exception {
  Configuration conf = new Configuration();
  Fake_ClassLoader classLoader = new Fake_ClassLoader();
  conf.setClassLoader(classLoader);
  assertTrue(conf.getClassLoader() instanceof Fake_ClassLoader);

  FileSystem fs = FileSystem.get(conf);
  Path testdir = new Path(System.getProperty("test.build.data", "/tmp"))
      .makeQualified(fs);

  Path base = new Path(testdir, "/empty");
  Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
  conf.set(CompositeInputFormat.JOIN_EXPR,
      CompositeInputFormat.compose("outer", IF_ClassLoaderChecker.class, src));

  CompositeInputFormat<NullWritable> inputFormat =
      new CompositeInputFormat<NullWritable>();
  // create dummy TaskAttemptID
  TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
  conf.set(MRJobConfig.TASK_ATTEMPT_ID, tid.toString());
  inputFormat.createRecordReader(
      inputFormat.getSplits(Job.getInstance(conf)).get(0),
      new TaskAttemptContextImpl(conf, tid));
}
Example 2
Source File: TestWrappedRRClassloader.java From big-c with Apache License 2.0
/**
 * Tests that the class loader set by
 * {@link Configuration#setClassLoader(ClassLoader)}
 * is inherited by any {@link WrappedRecordReader}s created by
 * {@link CompositeRecordReader}.
 */
public void testClassLoader() throws Exception {
  Configuration conf = new Configuration();
  Fake_ClassLoader classLoader = new Fake_ClassLoader();
  conf.setClassLoader(classLoader);
  assertTrue(conf.getClassLoader() instanceof Fake_ClassLoader);

  FileSystem fs = FileSystem.get(conf);
  Path testdir = new Path(System.getProperty("test.build.data", "/tmp"))
      .makeQualified(fs);

  Path base = new Path(testdir, "/empty");
  Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
  conf.set(CompositeInputFormat.JOIN_EXPR,
      CompositeInputFormat.compose("outer", IF_ClassLoaderChecker.class, src));

  CompositeInputFormat<NullWritable> inputFormat =
      new CompositeInputFormat<NullWritable>();
  // create dummy TaskAttemptID
  TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
  conf.set(MRJobConfig.TASK_ATTEMPT_ID, tid.toString());
  inputFormat.createRecordReader(
      inputFormat.getSplits(Job.getInstance(conf)).get(0),
      new TaskAttemptContextImpl(conf, tid));
}
Example 3
Source File: ParquetOutputPlugin.java From embulk-output-parquet with MIT License
private Configuration createConfiguration(Map<String, String> extra, List<String> configFiles)
{
    Configuration conf = new Configuration();

    // Default values
    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
    conf.set("fs.file.impl", LocalFileSystem.class.getName());
    for (String configFile : configFiles) {
        File file = new File(configFile);
        try {
            conf.addResource(file.toURI().toURL());
        }
        catch (MalformedURLException e) {
            throw new ConfigException(e);
        }
    }

    // Optional values
    for (Map.Entry<String, String> entry : extra.entrySet()) {
        conf.set(entry.getKey(), entry.getValue());
    }

    conf.setClassLoader(this.getClass().getClassLoader());

    return conf;
}
Example 4
Source File: AbstractHadoopProcessor.java From localization_nifi with Apache License 2.0
HdfsResources resetHDFSResources(String configResources, ProcessContext context) throws IOException {
    Configuration config = getConfigurationFromResources(configResources);
    config.setClassLoader(Thread.currentThread().getContextClassLoader()); // set the InstanceClassLoader

    // first check for timeout on HDFS connection, because FileSystem has a hard coded 15 minute timeout
    checkHdfsUriForTimeout(config);

    // disable caching of Configuration and FileSystem objects, else we cannot reconfigure
    // the processor without a complete restart
    String disableCacheName = String.format("fs.%s.impl.disable.cache",
            FileSystem.getDefaultUri(config).getScheme());
    config.set(disableCacheName, "true");

    // If kerberos is enabled, create the file system as the kerberos principal
    // -- use RESOURCES_LOCK to guarantee UserGroupInformation is accessed by only a single thread at a time
    FileSystem fs;
    UserGroupInformation ugi;
    synchronized (RESOURCES_LOCK) {
        if (SecurityUtil.isSecurityEnabled(config)) {
            String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).getValue();
            String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).getValue();
            ugi = SecurityUtil.loginKerberos(config, principal, keyTab);
            fs = getFileSystemAsUser(config, ugi);
            lastKerberosReloginTime = System.currentTimeMillis() / 1000;
        } else {
            config.set("ipc.client.fallback-to-simple-auth-allowed", "true");
            config.set("hadoop.security.authentication", "simple");
            ugi = SecurityUtil.loginSimple(config);
            fs = getFileSystemAsUser(config, ugi);
        }
    }
    getLogger().debug("resetHDFSResources UGI {}", new Object[]{ugi});

    final Path workingDir = fs.getWorkingDirectory();
    getLogger().info("Initialized a new HDFS File System with working dir: {} default block size: {} default replication: {} config: {}",
            new Object[]{workingDir, fs.getDefaultBlockSize(workingDir), fs.getDefaultReplication(workingDir), config.toString()});

    return new HdfsResources(config, fs, ugi);
}
Example 5
Source File: SqoopTool.java From aliyun-maxcompute-data-collectors with Apache License 2.0
/**
 * If $SQOOP_CONF_DIR/tools.d/ exists and sqoop.tool.plugins is not set,
 * then we look through the files in that directory; they should contain
 * lines of the form 'plugin.class.name[=/path/to/containing.jar]'.
 *
 * <p>Put all plugin.class.names into the Configuration, and load any
 * specified jars into the ClassLoader.
 * </p>
 *
 * @param conf the current configuration to populate with class names.
 * @return conf again, after possibly populating sqoop.tool.plugins.
 */
private static Configuration loadPluginsFromConfDir(Configuration conf) {
  if (conf.get(TOOL_PLUGINS_KEY) != null) {
    LOG.debug(TOOL_PLUGINS_KEY + " is set; ignoring tools.d");
    return conf;
  }

  String confDirName = System.getenv("SQOOP_CONF_DIR");
  if (null == confDirName) {
    LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. "
        + "Cannot check for additional configuration.");
    return conf;
  }

  File confDir = new File(confDirName);
  File toolsDir = new File(confDir, "tools.d");

  if (toolsDir.exists() && toolsDir.isDirectory()) {
    // We have a tools.d subdirectory. Get the file list, sort it,
    // and process them in order.
    String [] fileNames = toolsDir.list();
    Arrays.sort(fileNames);

    for (String fileName : fileNames) {
      File f = new File(toolsDir, fileName);
      if (f.isFile()) {
        loadPluginsFromFile(conf, f);
      }
    }
  }

  // Set the classloader in this configuration so that it will use
  // the jars we just loaded in.
  conf.setClassLoader(Thread.currentThread().getContextClassLoader());
  return conf;
}
Example 6
Source File: MRApps.java From hadoop with Apache License 2.0
/**
 * Sets the provided classloader on the given configuration and as the thread
 * context classloader if the classloader is not null.
 * @param classLoader the classloader to set (ignored if {@code null})
 * @param conf the configuration to set it on
 */
public static void setClassLoader(ClassLoader classLoader,
    Configuration conf) {
  if (classLoader != null) {
    LOG.info("Setting classloader " + classLoader.getClass().getName() +
        " on the configuration and as the thread context classloader");
    conf.setClassLoader(classLoader);
    Thread.currentThread().setContextClassLoader(classLoader);
  }
}
Example 7
Source File: TestClassWithNoPackage.java From hadoop with Apache License 2.0
@Test
public void testGoodClassOrNull() throws Exception {
  String NAME = "ClassWithNoPackage";
  ClassLoader cl = TestClassWithNoPackage.class.getClassLoader();
  String JAR = JarFinder.getJar(cl.loadClass(NAME));

  // Add testjob jar file to classpath.
  Configuration conf = new Configuration();
  conf.setClassLoader(new URLClassLoader(new URL[] { new URL("file", null, JAR) },
                                         null));
  // Get class with no package name.
  String defaultPackage = this.getClass().getPackage().getName();
  Class c = StreamUtil.goodClassOrNull(conf, NAME, defaultPackage);
  assertNotNull("Class " + NAME + " not found!", c);
}
Example 8
Source File: DfsTask.java From hadoop with Apache License 2.0
/**
 * Invoke {@link org.apache.hadoop.fs.FsShell#main} after a
 * few cursory checks of the configuration.
 */
public void execute() throws BuildException {
  if (null == cmd)
    throw new BuildException("Missing command (cmd) argument");
  argv.add(0, cmd);

  if (null == confloader) {
    setConf(getProject().getProperty("hadoop.conf.dir"));
  }

  int exit_code = 0;
  try {
    pushContext();

    Configuration conf = new HdfsConfiguration();
    conf.setClassLoader(confloader);
    exit_code = ToolRunner.run(conf, shell,
        argv.toArray(new String[argv.size()]));
    exit_code = postCmd(exit_code);

    if (0 > exit_code) {
      StringBuilder msg = new StringBuilder();
      for (String s : argv)
        msg.append(s + " ");
      msg.append("failed: " + exit_code);
      throw new Exception(msg.toString());
    }
  } catch (Exception e) {
    if (failonerror)
        throw new BuildException(e);
  } finally {
    popContext();
  }
}
Example 9
Source File: DfsTask.java From RDFS with Apache License 2.0
/**
 * Invoke {@link org.apache.hadoop.fs.FsShell#doMain FsShell.doMain} after a
 * few cursory checks of the configuration.
 */
public void execute() throws BuildException {
  if (null == cmd)
    throw new BuildException("Missing command (cmd) argument");
  argv.add(0, cmd);

  if (null == confloader) {
    setConf(getProject().getProperty("hadoop.conf.dir"));
  }

  int exit_code = 0;
  try {
    pushContext();

    Configuration conf = new Configuration();
    conf.setClassLoader(confloader);
    exit_code = ToolRunner.run(conf, shell,
        argv.toArray(new String[argv.size()]));
    exit_code = postCmd(exit_code);

    if (0 > exit_code) {
      StringBuilder msg = new StringBuilder();
      for (String s : argv)
        msg.append(s + " ");
      msg.append("failed: " + exit_code);
      throw new Exception(msg.toString());
    }
  } catch (Exception e) {
    if (failonerror)
        throw new BuildException(e);
  } finally {
    popContext();
  }
}
Example 10
Source File: ContentPump.java From marklogic-contentpump with Apache License 2.0
/**
 * Set class loader for current thread and for Configuration based on
 * Hadoop home.
 *
 * @param hdConfDir Hadoop home directory
 * @param conf Hadoop configuration
 * @throws MalformedURLException
 */
private static void setClassLoader(File hdConfDir, Configuration conf)
    throws Exception {
    ClassLoader parent = conf.getClassLoader();
    URL url = hdConfDir.toURI().toURL();
    URL[] urls = new URL[1];
    urls[0] = url;
    ClassLoader classLoader = new URLClassLoader(urls, parent);
    Thread.currentThread().setContextClassLoader(classLoader);
    conf.setClassLoader(classLoader);
}
Example 11
Source File: MRApps.java From big-c with Apache License 2.0
/**
 * Sets the provided classloader on the given configuration and as the thread
 * context classloader if the classloader is not null.
 * @param classLoader the classloader to set (ignored if {@code null})
 * @param conf the configuration to set it on
 */
public static void setClassLoader(ClassLoader classLoader,
    Configuration conf) {
  if (classLoader != null) {
    LOG.info("Setting classloader " + classLoader.getClass().getName() +
        " on the configuration and as the thread context classloader");
    conf.setClassLoader(classLoader);
    Thread.currentThread().setContextClassLoader(classLoader);
  }
}
Example 12
Source File: TestClassWithNoPackage.java From big-c with Apache License 2.0
@Test
public void testGoodClassOrNull() throws Exception {
  String NAME = "ClassWithNoPackage";
  ClassLoader cl = TestClassWithNoPackage.class.getClassLoader();
  String JAR = JarFinder.getJar(cl.loadClass(NAME));

  // Add testjob jar file to classpath.
  Configuration conf = new Configuration();
  conf.setClassLoader(new URLClassLoader(new URL[] { new URL("file", null, JAR) },
                                         null));
  // Get class with no package name.
  String defaultPackage = this.getClass().getPackage().getName();
  Class c = StreamUtil.goodClassOrNull(conf, NAME, defaultPackage);
  assertNotNull("Class " + NAME + " not found!", c);
}
Example 13
Source File: HBaseConfiguration.java From hbase with Apache License 2.0
/**
 * Creates a Configuration with HBase resources
 * @return a Configuration with HBase resources
 */
public static Configuration create() {
  Configuration conf = new Configuration();
  // In case HBaseConfiguration is loaded from a different classloader than
  // Configuration, conf needs to be set with appropriate class loader to resolve
  // HBase resources.
  conf.setClassLoader(HBaseConfiguration.class.getClassLoader());
  return addHbaseResources(conf);
}
Example 14
Source File: AbstractHdfsConnector.java From pulsar with Apache License 2.0
protected HdfsResources resetHDFSResources(HdfsSinkConfig hdfsSinkConfig) throws IOException {
    Configuration config = new ExtendedConfiguration();
    config.setClassLoader(Thread.currentThread().getContextClassLoader());

    getConfig(config, connectorConfig.getHdfsConfigResources());

    // first check for timeout on HDFS connection, because FileSystem has a hard coded 15 minute timeout
    checkHdfsUriForTimeout(config);

    /* Disable caching of Configuration and FileSystem objects, else we cannot reconfigure
     * the processor without a complete restart
     */
    String disableCacheName = String.format("fs.%s.impl.disable.cache",
            FileSystem.getDefaultUri(config).getScheme());
    config.set(disableCacheName, "true");

    // If kerberos is enabled, create the file system as the kerberos principal
    // -- use RESOURCES_LOCK to guarantee UserGroupInformation is accessed by only a single thread at a time
    FileSystem fs;
    UserGroupInformation ugi;
    synchronized (RESOURCES_LOCK) {
        if (SecurityUtil.isSecurityEnabled(config)) {
            ugi = SecurityUtil.loginKerberos(config,
                    connectorConfig.getKerberosUserPrincipal(),
                    connectorConfig.getKeytab());
            fs = getFileSystemAsUser(config, ugi);
        } else {
            config.set("ipc.client.fallback-to-simple-auth-allowed", "true");
            config.set("hadoop.security.authentication", "simple");
            ugi = SecurityUtil.loginSimple(config);
            fs = getFileSystemAsUser(config, ugi);
        }
    }
    return new HdfsResources(config, fs, ugi);
}
Example 15
Source File: GenericOptionsParser.java From RDFS with Apache License 2.0
/**
 * Modify configuration according to user-specified generic options
 * @param conf Configuration to be modified
 * @param line User-specified generic options
 */
private void processGeneralOptions(Configuration conf, CommandLine line) {
  if (line.hasOption("fs")) {
    FileSystem.setDefaultUri(conf, line.getOptionValue("fs"));
  }

  if (line.hasOption("jt")) {
    conf.set("mapred.job.tracker", line.getOptionValue("jt"));
  }
  if (line.hasOption("conf")) {
    String[] values = line.getOptionValues("conf");
    for (String value : values) {
      conf.addResource(new Path(value));
    }
  }
  try {
    if (line.hasOption("libjars")) {
      conf.set("tmpjars",
               validateFiles(line.getOptionValue("libjars"), conf));
      // setting libjars in client classpath
      URL[] libjars = getLibJars(conf);
      if (libjars != null && libjars.length > 0) {
        conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
        Thread.currentThread().setContextClassLoader(
            new URLClassLoader(libjars,
                Thread.currentThread().getContextClassLoader()));
      }
    }
    if (line.hasOption("files")) {
      conf.set("tmpfiles",
               validateFiles(line.getOptionValue("files"), conf));
    }
    if (line.hasOption("archives")) {
      conf.set("tmparchives",
               validateFiles(line.getOptionValue("archives"), conf));
    }
  } catch (IOException ioe) {
    System.err.println(StringUtils.stringifyException(ioe));
  }
  if (line.hasOption('D')) {
    String[] property = line.getOptionValues('D');
    for (String prop : property) {
      String[] keyval = prop.split("=", 2);
      if (keyval.length == 2) {
        conf.set(keyval[0], keyval[1]);
      }
    }
  }
  conf.setBoolean("mapred.used.genericoptionsparser", true);
}
Example 16
Source File: GenericOptionsParser.java From hadoop-gpu with Apache License 2.0
/**
 * Modify configuration according to user-specified generic options
 * @param conf Configuration to be modified
 * @param line User-specified generic options
 */
private void processGeneralOptions(Configuration conf, CommandLine line) {
  if (line.hasOption("fs")) {
    FileSystem.setDefaultUri(conf, line.getOptionValue("fs"));
  }

  if (line.hasOption("jt")) {
    conf.set("mapred.job.tracker", line.getOptionValue("jt"));
  }
  if (line.hasOption("conf")) {
    String[] values = line.getOptionValues("conf");
    for (String value : values) {
      conf.addResource(new Path(value));
    }
  }
  try {
    if (line.hasOption("libjars")) {
      conf.set("tmpjars",
               validateFiles(line.getOptionValue("libjars"), conf));
      // setting libjars in client classpath
      URL[] libjars = getLibJars(conf);
      if (libjars != null && libjars.length > 0) {
        conf.setClassLoader(new URLClassLoader(libjars, conf.getClassLoader()));
        Thread.currentThread().setContextClassLoader(
            new URLClassLoader(libjars,
                Thread.currentThread().getContextClassLoader()));
      }
    }
    if (line.hasOption("files")) {
      conf.set("tmpfiles",
               validateFiles(line.getOptionValue("files"), conf));
    }
    if (line.hasOption("archives")) {
      conf.set("tmparchives",
               validateFiles(line.getOptionValue("archives"), conf));
    }
  } catch (IOException ioe) {
    System.err.println(StringUtils.stringifyException(ioe));
  }
  if (line.hasOption('D')) {
    String[] property = line.getOptionValues('D');
    for (String prop : property) {
      String[] keyval = prop.split("=", 2);
      if (keyval.length == 2) {
        conf.set(keyval[0], keyval[1]);
      }
    }
  }
  conf.setBoolean("mapred.used.genericoptionsparser", true);
}
Example 17
Source File: AbstractHadoopProcessor.java From nifi with Apache License 2.0
@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
    final String configResources = validationContext.getProperty(HADOOP_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
    final String explicitPrincipal = validationContext.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
    final String explicitKeytab = validationContext.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
    final String explicitPassword = validationContext.getProperty(kerberosProperties.getKerberosPassword()).getValue();
    final KerberosCredentialsService credentialsService = validationContext.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);

    final String resolvedPrincipal;
    final String resolvedKeytab;
    if (credentialsService == null) {
        resolvedPrincipal = explicitPrincipal;
        resolvedKeytab = explicitKeytab;
    } else {
        resolvedPrincipal = credentialsService.getPrincipal();
        resolvedKeytab = credentialsService.getKeytab();
    }

    final List<ValidationResult> results = new ArrayList<>();

    if (StringUtils.isBlank(configResources)) {
        return results;
    }

    try {
        ValidationResources resources = validationResourceHolder.get();

        // if no resources in the holder, or if the holder has different resources loaded,
        // then load the Configuration and set the new resources in the holder
        if (resources == null || !configResources.equals(resources.getConfigResources())) {
            getLogger().debug("Reloading validation resources");
            final Configuration config = new ExtendedConfiguration(getLogger());
            config.setClassLoader(Thread.currentThread().getContextClassLoader());
            resources = new ValidationResources(configResources, getConfigurationFromResources(config, configResources));
            validationResourceHolder.set(resources);
        }

        final Configuration conf = resources.getConfiguration();
        results.addAll(KerberosProperties.validatePrincipalWithKeytabOrPassword(
            this.getClass().getSimpleName(), conf, resolvedPrincipal, resolvedKeytab, explicitPassword, getLogger()));

    } catch (final IOException e) {
        results.add(new ValidationResult.Builder()
            .valid(false)
            .subject("Hadoop Configuration Resources")
            .explanation("Could not load Hadoop Configuration resources due to: " + e)
            .build());
    }

    if (credentialsService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
        results.add(new ValidationResult.Builder()
            .subject("Kerberos Credentials")
            .valid(false)
            .explanation("Cannot specify a Kerberos Credentials Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
            .build());
    }

    if (!isAllowExplicitKeytab() && explicitKeytab != null) {
        results.add(new ValidationResult.Builder()
            .subject("Kerberos Credentials")
            .valid(false)
            .explanation("The '" + ALLOW_EXPLICIT_KEYTAB + "' system environment variable is configured to forbid explicitly configuring Kerberos Keytab in processors. "
                + "The Kerberos Credentials Service should be used instead of setting the Kerberos Keytab or Kerberos Principal property.")
            .build());
    }

    return results;
}
Example 18
Source File: AbstractHadoopProcessor.java From nifi with Apache License 2.0
HdfsResources resetHDFSResources(String configResources, ProcessContext context) throws IOException {
    Configuration config = new ExtendedConfiguration(getLogger());
    config.setClassLoader(Thread.currentThread().getContextClassLoader());

    getConfigurationFromResources(config, configResources);

    // give sub-classes a chance to process configuration
    preProcessConfiguration(config, context);

    // first check for timeout on HDFS connection, because FileSystem has a hard coded 15 minute timeout
    checkHdfsUriForTimeout(config);

    // disable caching of Configuration and FileSystem objects, else we cannot reconfigure
    // the processor without a complete restart
    String disableCacheName = String.format("fs.%s.impl.disable.cache",
            FileSystem.getDefaultUri(config).getScheme());
    config.set(disableCacheName, "true");

    // If kerberos is enabled, create the file system as the kerberos principal
    // -- use RESOURCES_LOCK to guarantee UserGroupInformation is accessed by only a single thread at a time
    FileSystem fs;
    UserGroupInformation ugi;
    KerberosUser kerberosUser;
    synchronized (RESOURCES_LOCK) {
        if (SecurityUtil.isSecurityEnabled(config)) {
            String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
            String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
            String password = context.getProperty(kerberosProperties.getKerberosPassword()).getValue();

            // If the Kerberos Credentials Service is specified, we need to use its configuration, not the explicit properties for principal/keytab.
            // The customValidate method ensures that only one can be set, so we know that the principal & keytab above are null.
            final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
            if (credentialsService != null) {
                principal = credentialsService.getPrincipal();
                keyTab = credentialsService.getKeytab();
            }

            if (keyTab != null) {
                kerberosUser = new KerberosKeytabUser(principal, keyTab);
            } else if (password != null) {
                kerberosUser = new KerberosPasswordUser(principal, password);
            } else {
                throw new IOException("Unable to authenticate with Kerberos, no keytab or password was provided");
            }

            ugi = SecurityUtil.getUgiForKerberosUser(config, kerberosUser);
        } else {
            config.set("ipc.client.fallback-to-simple-auth-allowed", "true");
            config.set("hadoop.security.authentication", "simple");
            ugi = SecurityUtil.loginSimple(config);
            kerberosUser = null;
        }
        fs = getFileSystemAsUser(config, ugi);
    }
    getLogger().debug("resetHDFSResources UGI [{}], KerberosUser [{}]", new Object[]{ugi, kerberosUser});

    final Path workingDir = fs.getWorkingDirectory();
    getLogger().info("Initialized a new HDFS File System with working dir: {} default block size: {} default replication: {} config: {}",
            new Object[]{workingDir, fs.getDefaultBlockSize(workingDir), fs.getDefaultReplication(workingDir), config.toString()});

    return new HdfsResources(config, fs, ugi, kerberosUser);
}
Example 19
Source File: ConnFactory.java From aliyun-maxcompute-data-collectors with Apache License 2.0
/**
 * If $SQOOP_CONF_DIR/managers.d/ exists and sqoop.connection.factories is
 * not set, then we look through the files in that directory; they should
 * contain lines of the form mgr.class.name[=/path/to/containing.jar].
 *
 * <p>
 * Put all mgr.class.names into the Configuration, and load any specified
 * jars into the ClassLoader.
 * </p>
 *
 * @param conf the current configuration to populate with class names.
 * @return conf again, after possibly populating sqoop.connection.factories.
 */
private Configuration loadManagersFromConfDir(Configuration conf) {
  if (conf.get(FACTORY_CLASS_NAMES_KEY) != null) {
    LOG.debug(FACTORY_CLASS_NAMES_KEY + " is set; ignoring managers.d");
    return conf;
  }

  String confDirName = System.getenv("SQOOP_CONF_DIR");
  if (null == confDirName) {
    LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. "
        + "Cannot check for additional configuration.");
    return conf;
  }

  File confDir = new File(confDirName);
  File mgrDir = new File(confDir, "managers.d");

  if (mgrDir.exists() && mgrDir.isDirectory()) {
    // We have a managers.d subdirectory. Get the file list, sort it,
    // and process them in order.
    String[] fileNames;

    try {
      fileNames = mgrDir.list();
    } catch (SecurityException e) {
      fileNames = null;
    }

    if (null == fileNames) {
      LOG.warn("Sqoop cannot read $SQOOP_CONF_DIR/managers.d. "
          + "Please check the permissions on managers.d.");
      return conf;
    }

    Arrays.sort(fileNames);

    for (String fileName : fileNames) {
      File f = new File(mgrDir, fileName);
      if (f.isFile()) {
        addManagersFromFile(conf, f);
      }
    }

    // Add the default MF.
    addManager(conf, DEFAULT_FACTORY_CLASS_NAMES);
  }

  // Set the classloader in this configuration so that it will use
  // the jars we just loaded in.
  conf.setClassLoader(Thread.currentThread().getContextClassLoader());
  return conf;
}