org.apache.hadoop.util.ReflectionUtils Java Examples
The following examples show how to use
org.apache.hadoop.util.ReflectionUtils.
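Nearly every example below follows the same pattern: look up an implementation class from the Configuration (usually via conf.getClass with a default), then instantiate it with ReflectionUtils.newInstance, which invokes the no-argument constructor and, for Configurable objects, also injects the Configuration. Before the real-world examples, here is a minimal, self-contained sketch of that pattern; the configuration key "example.codec.class" and the GzipCodec default are placeholders chosen for illustration, not anything defined by ReflectionUtils itself.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class ReflectionUtilsSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Resolve the implementation class from a placeholder config key,
    // falling back to GzipCodec when the key is unset.
    Class<? extends CompressionCodec> codecClass =
        conf.getClass("example.codec.class", GzipCodec.class, CompressionCodec.class);
    // newInstance() creates the object via its no-arg constructor and, because
    // the codec implements Configurable, also calls setConf(conf) on it.
    CompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);
    System.out.println("Instantiated codec: " + codec.getClass().getName());
  }
}

The same shape recurs throughout the examples; only the configuration key, the interface, and the default implementation change.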
Example #1
Source File: HttpFSFileSystem.java From hadoop with Apache License 2.0
/**
 * Called after a new FileSystem instance is constructed.
 *
 * @param name a uri whose authority section names the host, port, etc. for this FileSystem
 * @param conf the configuration
 */
@Override
public void initialize(URI name, Configuration conf) throws IOException {
  UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

  // the real user is the one that has the Kerberos credentials needed for
  // SPNEGO to work
  realUser = ugi.getRealUser();
  if (realUser == null) {
    realUser = UserGroupInformation.getLoginUser();
  }
  super.initialize(name, conf);
  try {
    uri = new URI(name.getScheme() + "://" + name.getAuthority());
  } catch (URISyntaxException ex) {
    throw new IOException(ex);
  }

  Class<? extends DelegationTokenAuthenticator> klass =
      getConf().getClass("httpfs.authenticator.class",
          KerberosDelegationTokenAuthenticator.class,
          DelegationTokenAuthenticator.class);
  DelegationTokenAuthenticator authenticator =
      ReflectionUtils.newInstance(klass, getConf());
  authURL = new DelegationTokenAuthenticatedURL(authenticator);
}
Example #2
Source File: TestTextInputFormat.java From hadoop-gpu with Apache License 2.0
/**
 * Test using the gzip codec and an empty input file
 */
public static void testGzipEmpty() throws IOException {
  JobConf job = new JobConf();
  CompressionCodec gzip = new GzipCodec();
  ReflectionUtils.setConf(gzip, job);
  localFs.delete(workDir, true);
  writeFile(localFs, new Path(workDir, "empty.gz"), gzip, "");
  FileInputFormat.setInputPaths(job, workDir);
  TextInputFormat format = new TextInputFormat();
  format.configure(job);
  InputSplit[] splits = format.getSplits(job, 100);
  assertEquals("Compressed files of length 0 are not returned from FileInputFormat.getSplits().",
      1, splits.length);
  List<Text> results = readSplit(format, splits[0], job);
  assertEquals("Compressed empty file length == 0", 0, results.size());
}
Example #3
Source File: HttpServer2.java From big-c with Apache License 2.0
/** Get an array of FilterConfiguration specified in the conf */
private static FilterInitializer[] getFilterInitializers(Configuration conf) {
  if (conf == null) {
    return null;
  }

  Class<?>[] classes = conf.getClasses(FILTER_INITIALIZER_PROPERTY);
  if (classes == null) {
    return null;
  }

  FilterInitializer[] initializers = new FilterInitializer[classes.length];
  for (int i = 0; i < classes.length; i++) {
    initializers[i] = (FilterInitializer) ReflectionUtils.newInstance(
        classes[i], conf);
  }
  return initializers;
}
Example #4
Source File: RMProxy.java From hadoop with Apache License 2.0
/**
 * Helper method to create FailoverProxyProvider.
 */
private <T> RMFailoverProxyProvider<T> createRMFailoverProxyProvider(
    Configuration conf, Class<T> protocol) {
  Class<? extends RMFailoverProxyProvider<T>> defaultProviderClass;
  try {
    defaultProviderClass = (Class<? extends RMFailoverProxyProvider<T>>)
        Class.forName(YarnConfiguration.DEFAULT_CLIENT_FAILOVER_PROXY_PROVIDER);
  } catch (Exception e) {
    throw new YarnRuntimeException("Invalid default failover provider class" +
        YarnConfiguration.DEFAULT_CLIENT_FAILOVER_PROXY_PROVIDER, e);
  }

  RMFailoverProxyProvider<T> provider = ReflectionUtils.newInstance(
      conf.getClass(YarnConfiguration.CLIENT_FAILOVER_PROXY_PROVIDER,
          defaultProviderClass, RMFailoverProxyProvider.class), conf);
  provider.init(conf, (RMProxy<T>) this, protocol);
  return provider;
}
Example #5
Source File: JobSubmitter.java From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
private <T extends InputSplit> int writeNewSplits(JobContext job, Path jobSubmitDir)
    throws IOException, InterruptedException, ClassNotFoundException {
  Configuration conf = job.getConfiguration();
  InputFormat<?, ?> input =
      ReflectionUtils.newInstance(job.getInputFormatClass(), conf);

  List<InputSplit> splits = input.getSplits(job);
  T[] array = (T[]) splits.toArray(new InputSplit[splits.size()]);

  // sort the splits into order based on size, so that the biggest
  // go first
  Arrays.sort(array, new SplitComparator());
  JobSplitWriter.createSplitFiles(jobSubmitDir, conf,
      jobSubmitDir.getFileSystem(conf), array);
  return array.length;
}
Example #6
Source File: SCMStore.java From hadoop with Apache License 2.0
/**
 * Create an instance of the AppChecker service via reflection based on the
 * {@link YarnConfiguration#SCM_APP_CHECKER_CLASS} parameter.
 *
 * @param conf
 * @return an instance of the AppChecker class
 */
@Private
@SuppressWarnings("unchecked")
public static AppChecker createAppCheckerService(Configuration conf) {
  Class<? extends AppChecker> defaultCheckerClass;
  try {
    defaultCheckerClass = (Class<? extends AppChecker>) Class
        .forName(YarnConfiguration.DEFAULT_SCM_APP_CHECKER_CLASS);
  } catch (Exception e) {
    throw new YarnRuntimeException("Invalid default scm app checker class" +
        YarnConfiguration.DEFAULT_SCM_APP_CHECKER_CLASS, e);
  }

  AppChecker checker = ReflectionUtils.newInstance(conf.getClass(
      YarnConfiguration.SCM_APP_CHECKER_CLASS, defaultCheckerClass,
      AppChecker.class), conf);
  return checker;
}
Example #7
Source File: TestDFSIO.java From big-c with Apache License 2.0
@Override // Mapper
public void configure(JobConf conf) {
  super.configure(conf);

  // grab compression
  String compression = getConf().get("test.io.compression.class", null);
  Class<? extends CompressionCodec> codec;

  // try to initialize codec
  try {
    codec = (compression == null) ? null :
        Class.forName(compression).asSubclass(CompressionCodec.class);
  } catch (Exception e) {
    throw new RuntimeException("Compression codec not found: ", e);
  }

  if (codec != null) {
    compressionCodec = (CompressionCodec)
        ReflectionUtils.newInstance(codec, getConf());
  }
}
Example #8
Source File: KMSWebApp.java From ranger with Apache License 2.0
@SuppressWarnings("unchecked")
private KeyACLs getAcls(String clsStr) throws IOException {
  KeyACLs keyAcl = null;
  try {
    Class<? extends KeyACLs> cls = null;
    if (clsStr == null || clsStr.trim().equals("")) {
      cls = KMSACLs.class;
    } else {
      Class<?> configClass = Class.forName(clsStr);
      if (!KeyACLs.class.isAssignableFrom(configClass)) {
        throw new RuntimeException(clsStr + " should implement KeyACLs");
      }
      cls = (Class<? extends KeyACLs>) configClass;
    }
    if (cls != null) {
      keyAcl = ReflectionUtils.newInstance(cls, kmsConf);
    }
  } catch (Exception e) {
    LOG.error("Unable to getAcls with an exception", e);
    throw new IOException(e.getMessage());
  }
  return keyAcl;
}
Example #9
Source File: TaskTracker.java From RDFS with Apache License 2.0
@Override
public void run() {
  LOG.info("Starting HeartbeatMonitor");
  boolean forceExit = false;
  long gap = 0;
  while (running && !shuttingDown) {
    long now = System.currentTimeMillis();
    gap = now - lastHeartbeat;
    if (gap > maxHeartbeatGap) {
      forceExit = true;
      break;
    }
    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
    }
  }
  if (forceExit) {
    LOG.fatal("No heartbeat for " + gap + " msec, TaskTracker has to die");
    ReflectionUtils.logThreadInfo(LOG, "No heartbeat", 1);
    System.exit(-1);
  } else {
    LOG.info("Stopping HeartbeatMonitor, running=" + running +
        ", shuttingDown=" + shuttingDown);
  }
}
Example #10
Source File: Task.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
public void combine(RawKeyValueIterator kvIter,
                    OutputCollector<K, V> combineCollector) throws IOException {
  Reducer<K, V, K, V> combiner =
      ReflectionUtils.newInstance(combinerClass, job);
  try {
    CombineValuesIterator<K, V> values =
        new CombineValuesIterator<K, V>(kvIter, comparator, keyClass,
            valueClass, job, reporter, inputCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector, reporter);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
Example #11
Source File: MultiMROutput.java From tez with Apache License 2.0
@SuppressWarnings("unchecked")
private synchronized RecordWriter getNewRecordWriter(
    TaskAttemptContext taskContext, String baseFileName)
    throws IOException, InterruptedException {

  // look for record-writer in the cache
  RecordWriter writer = newRecordWriters.get(baseFileName);

  // If not in cache, create a new one
  if (writer == null) {
    // get the record writer from context output format
    taskContext.getConfiguration().set(
        MRJobConfig.FILEOUTPUTFORMAT_BASE_OUTPUT_NAME, baseFileName);
    try {
      writer = ((OutputFormat) ReflectionUtils.newInstance(
          taskContext.getOutputFormatClass(), taskContext.getConfiguration()))
          .getRecordWriter(taskContext);
    } catch (ClassNotFoundException e) {
      throw new IOException(e);
    }
    // add the record-writer to the cache
    newRecordWriters.put(baseFileName, writer);
  }
  return writer;
}
Example #12
Source File: JSONFileOutputFormat.java From ojai with Apache License 2.0
@Override
public RecordWriter<LongWritable, Document> getRecordWriter(
    TaskAttemptContext job) throws IOException, InterruptedException {
  Configuration conf = job.getConfiguration();
  boolean isCompressed = getCompressOutput(job);
  CompressionCodec codec = null;
  String extension = "";
  if (isCompressed) {
    Class<? extends CompressionCodec> codecClass =
        getOutputCompressorClass(job, GzipCodec.class);
    codec = ReflectionUtils.newInstance(codecClass, conf);
    extension = codec.getDefaultExtension();
  }
  Path path = getDefaultWorkFile(job, extension);
  FileSystem fs = path.getFileSystem(conf);
  FSDataOutputStream out = fs.create(path, false);
  if (!isCompressed) {
    return new JSONFileOutputRecordWriter(out);
  } else {
    return new JSONFileOutputRecordWriter(new DataOutputStream(
        codec.createOutputStream(out)));
  }
}
Example #13
Source File: NetezzaExternalTableHCatImportMapper.java From aliyun-maxcompute-data-collectors with Apache License 2.0
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  helper = new SqoopHCatImportHelper(conf);
  String recordClassName = conf.get(ConfigurationHelper
      .getDbInputClassProperty());
  if (null == recordClassName) {
    throw new IOException("DB Input class name is not set!");
  }
  try {
    Class<?> cls = Class.forName(recordClassName, true,
        Thread.currentThread().getContextClassLoader());
    sqoopRecord = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }
  if (null == sqoopRecord) {
    throw new IOException("Could not instantiate object of type "
        + recordClassName);
  }
}
Example #14
Source File: MultithreadedMapper.java From marklogic-contentpump with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  if (!outer.nextKeyValue()) {
    return false;
  }
  if (outer.getCurrentKey() == null) {
    return true;
  }
  key = (K1) ReflectionUtils.newInstance(outer.getCurrentKey()
      .getClass(), outer.getConfiguration());
  key = ReflectionUtils.copy(outer.getConfiguration(),
      outer.getCurrentKey(), key);
  V1 outerVal = outer.getCurrentValue();
  if (outerVal != null) {
    value = (V1) ReflectionUtils.newInstance(outerVal.getClass(),
        outer.getConfiguration());
    value = ReflectionUtils.copy(outer.getConfiguration(),
        outer.getCurrentValue(), value);
  }
  return true;
}
Example #15
Source File: MergeManagerImpl.java From hadoop with Apache License 2.0
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator =
      (RawComparator<K>) job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
          Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
Example #16
Source File: Chain.java From hadoop-gpu with Apache License 2.0
private <E> E makeCopyForPassByValue(Serialization<E> serialization,
                                     E obj) throws IOException {
  Serializer<E> ser =
      serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
      serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
      getChainJobConf());

  ByteArrayInputStream bais =
      new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
Example #17
Source File: InjectableConnManager.java From aliyun-maxcompute-data-collectors with Apache License 2.0
/**
 * Allow the user to inject custom mapper, input, and output formats
 * into the importTable() process.
 */
@Override
@SuppressWarnings("unchecked")
public void importTable(ImportJobContext context)
    throws IOException, ImportException {

  SqoopOptions options = context.getOptions();
  Configuration conf = options.getConf();

  Class<? extends Mapper> mapperClass = (Class<? extends Mapper>)
      conf.getClass(MAPPER_KEY, Mapper.class);
  Class<? extends InputFormat> ifClass = (Class<? extends InputFormat>)
      conf.getClass(INPUT_FORMAT_KEY, TextInputFormat.class);
  Class<? extends OutputFormat> ofClass = (Class<? extends OutputFormat>)
      conf.getClass(OUTPUT_FORMAT_KEY, TextOutputFormat.class);

  Class<? extends ImportJobBase> jobClass = (Class<? extends ImportJobBase>)
      conf.getClass(IMPORT_JOB_KEY, ImportJobBase.class);

  String tableName = context.getTableName();

  // Instantiate the user's chosen ImportJobBase instance.
  ImportJobBase importJob = ReflectionUtils.newInstance(jobClass, conf);

  // And configure the dependencies to inject
  importJob.setOptions(options);
  importJob.setMapperClass(mapperClass);
  importJob.setInputFormatClass(ifClass);
  importJob.setOutputFormatClass(ofClass);

  importJob.runImport(tableName, context.getJarFile(),
      getSplitColumn(options, tableName), conf);
}
Example #18
Source File: LazyOutputFormat.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
private void getBaseOutputFormat(JobConf job) throws IOException {
  baseOut = ReflectionUtils.newInstance(
      job.getClass("mapreduce.output.lazyoutputformat.outputformat", null,
          OutputFormat.class), job);
  if (baseOut == null) {
    throw new IOException("Output format not set for LazyOutputFormat");
  }
}
Example #19
Source File: MetricsSourceBuilder.java From big-c with Apache License 2.0
private MetricsRegistry initRegistry(Object source) {
  Class<?> cls = source.getClass();
  MetricsRegistry r = null;
  // Get the registry if it already exists.
  for (Field field : ReflectionUtils.getDeclaredFieldsIncludingInherited(cls)) {
    if (field.getType() != MetricsRegistry.class) continue;
    try {
      field.setAccessible(true);
      r = (MetricsRegistry) field.get(source);
      hasRegistry = r != null;
      break;
    } catch (Exception e) {
      LOG.warn("Error accessing field " + field, e);
      continue;
    }
  }
  // Create a new registry according to annotation
  for (Annotation annotation : cls.getAnnotations()) {
    if (annotation instanceof Metrics) {
      Metrics ma = (Metrics) annotation;
      info = factory.getInfo(cls, ma);
      if (r == null) {
        r = new MetricsRegistry(info);
      }
      r.setContext(ma.context());
    }
  }
  if (r == null) return new MetricsRegistry(cls.getSimpleName());
  return r;
}
Example #20
Source File: TestApacheHttpdLogfileInputFormat.java From logparser with Apache License 2.0
@Test
public void checkInputFormat() throws IOException, InterruptedException {
  Configuration conf = new Configuration(false);
  conf.set("fs.default.name", "file:///");
  conf.set("nl.basjes.parse.apachehttpdlogline.format", logformat);
  // A ',' separated list of fields
  conf.set("nl.basjes.parse.apachehttpdlogline.fields",
      "TIME.EPOCH:request.receive.time.epoch," +
      "HTTP.USERAGENT:request.user-agent");

  File testFile = new File("src/test/resources/access.log");
  Path path = new Path(testFile.getAbsoluteFile().toURI());
  FileSplit split = new FileSplit(path, 0, testFile.length(), null);

  InputFormat inputFormat =
      ReflectionUtils.newInstance(ApacheHttpdLogfileInputFormat.class, conf);
  TaskAttemptContext context =
      new TaskAttemptContextImpl(conf, new TaskAttemptID());
  RecordReader reader = inputFormat.createRecordReader(split, context);
  reader.initialize(split, context);

  assertTrue(reader.nextKeyValue());

  Object value = reader.getCurrentValue();
  if (value instanceof ParsedRecord) {
    assertEquals("1483272081000",
        ((ParsedRecord) value).getString("TIME.EPOCH:request.receive.time.epoch"));
    assertEquals("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36",
        ((ParsedRecord) value).getString("HTTP.USERAGENT:request.user-agent"));
  } else {
    fail("Wrong return class type");
  }
}
Example #21
Source File: BackupRestoreFactory.java From hbase with Apache License 2.0
/**
 * Gets backup restore job
 * @param conf configuration
 * @return backup restore job instance
 */
public static RestoreJob getRestoreJob(Configuration conf) {
  Class<? extends RestoreJob> cls =
      conf.getClass(HBASE_INCR_RESTORE_IMPL_CLASS, MapReduceRestoreJob.class,
          RestoreJob.class);
  RestoreJob service = ReflectionUtils.newInstance(cls, conf);
  service.setConf(conf);
  return service;
}
Example #22
Source File: FileInputFormat.java From hadoop-gpu with Apache License 2.0
/**
 * Get a PathFilter instance of the filter set for the input paths.
 *
 * @return the PathFilter instance set for the job, NULL if none has been set.
 */
public static PathFilter getInputPathFilter(JobConf conf) {
  Class<? extends PathFilter> filterClass = conf.getClass(
      "mapred.input.pathFilter.class", null, PathFilter.class);
  return (filterClass != null) ?
      ReflectionUtils.newInstance(filterClass, conf) : null;
}
Example #23
Source File: MapRunner.java From hadoop-gpu with Apache License 2.0
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
  this.mapper = ReflectionUtils.newInstance(job.getMapperClass(), job);
  // increment processed counter only if skipping feature is enabled
  this.incrProcCount = SkipBadRecords.getMapperMaxSkipRecords(job) > 0 &&
      SkipBadRecords.getAutoIncrMapperProcCount(job);
}
Example #24
Source File: GenericMRLoadGenerator.java From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
public RecordReader<K, V> createRecordReader(InputSplit split,
    TaskAttemptContext context) throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  InputFormat<K, V> indirIF = (InputFormat) ReflectionUtils.newInstance(
      conf.getClass(INDIRECT_INPUT_FORMAT,
          SequenceFileInputFormat.class), conf);
  IndirectSplit is = ((IndirectSplit) split);
  return indirIF.createRecordReader(new FileSplit(is.getPath(), 0,
      is.getLength(), (String[]) null), context);
}
Example #25
Source File: Compression.java From hbase with Apache License 2.0
private CompressionCodec buildCodec(Configuration conf) {
  try {
    Class<?> externalCodec = getClassLoaderForCodec()
        .loadClass("org.apache.hadoop.io.compress.ZStandardCodec");
    return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
  } catch (ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
}
Example #26
Source File: CreateHTableJob.java From Kylin with Apache License 2.0
@SuppressWarnings("deprecation")
public byte[][] getSplits(Configuration conf, Path path) throws Exception {
  FileSystem fs = path.getFileSystem(conf);
  if (fs.exists(path) == false) {
    System.err.println("Path " + path +
        " not found, no region split, HTable will be one region");
    return null;
  }

  List<byte[]> rowkeyList = new ArrayList<byte[]>();
  SequenceFile.Reader reader = null;
  try {
    reader = new SequenceFile.Reader(fs, path, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    while (reader.next(key, value)) {
      rowkeyList.add(((Text) key).copyBytes());
    }
  } catch (Exception e) {
    e.printStackTrace();
    throw e;
  } finally {
    IOUtils.closeStream(reader);
  }

  logger.info((rowkeyList.size() + 1) + " regions");
  logger.info(rowkeyList.size() + " splits");
  for (byte[] split : rowkeyList) {
    System.out.println(StringUtils.byteToHexString(split));
  }

  byte[][] retValue = rowkeyList.toArray(new byte[rowkeyList.size()][]);
  return retValue.length == 0 ? null : retValue;
}
Example #27
Source File: TestDataTransferKeepalive.java From hadoop with Apache License 2.0
private void assertXceiverCount(int expected) {
  int count = getXceiverCountWithoutServer();
  if (count != expected) {
    ReflectionUtils.printThreadInfo(System.err, "Thread dumps");
    fail("Expected " + expected + " xceivers, found " + count);
  }
}
Example #28
Source File: GenericMRLoadGenerator.java From hadoop with Apache License 2.0
public RecordReader getRecordReader(InputSplit split, JobConf job,
    Reporter reporter) throws IOException {
  InputFormat indirIF = (InputFormat) ReflectionUtils.newInstance(
      job.getClass(org.apache.hadoop.mapreduce.
          GenericMRLoadGenerator.INDIRECT_INPUT_FORMAT,
          SequenceFileInputFormat.class), job);
  IndirectSplit is = ((IndirectSplit) split);
  return indirIF.getRecordReader(new FileSplit(is.getPath(), 0,
      is.getLength(), (String[]) null), job, reporter);
}
Example #29
Source File: MultiFilterRecordReader.java From RDFS with Apache License 2.0
/** {@inheritDoc} */
@SuppressWarnings("unchecked") // Explicit check for value class agreement
public V createValue() {
  if (null == valueclass) {
    final Class<?> cls = kids[0].createValue().getClass();
    for (RecordReader<K, ? extends V> rr : kids) {
      if (!cls.equals(rr.createValue().getClass())) {
        throw new ClassCastException("Child value classes fail to agree");
      }
    }
    valueclass = cls.asSubclass(Writable.class);
    ivalue = createInternalValue();
  }
  return (V) ReflectionUtils.newInstance(valueclass, null);
}
Example #30
Source File: TaggedInputSplit.java From kangaroo with Apache License 2.0
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
  inputSplitClass = (Class<? extends InputSplit>) readClass(in);
  inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
  mapperClass = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
  inputSplit = ReflectionUtils.newInstance(inputSplitClass, conf);
  SerializationFactory factory = new SerializationFactory(conf);
  Deserializer deserializer = factory.getDeserializer(inputSplitClass);
  deserializer.open((DataInputStream) in);
  inputSplit = (InputSplit) deserializer.deserialize(inputSplit);
}