org.apache.hadoop.fs.BufferedFSInputStream Java Examples
The following examples show how to use org.apache.hadoop.fs.BufferedFSInputStream. They are drawn from open-source projects; the source file, project, and license are noted above each example.
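All of the examples share one pattern: a store-specific FSInputStream is wrapped in a BufferedFSInputStream, which adds client-side read buffering, and that in turn is wrapped in the FSDataInputStream that FileSystem.open() must return. The sketch below reproduces the pattern against the local file system; the class name, path, and buffer size are illustrative assumptions rather than code from any of the projects listed here.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BufferedFSInputStream;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class BufferedOpenSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical path and buffer size; adjust for your environment.
    Path path = new Path("file:///tmp/example.txt");
    int bufferSize = 4096;

    FileSystem fs = FileSystem.get(path.toUri(), new Configuration());
    FSDataInputStream raw = fs.open(path, bufferSize);

    // BufferedFSInputStream can only wrap an FSInputStream, so check the
    // wrapped stream first (the same check the hudi example below performs).
    if (raw.getWrappedStream() instanceof FSInputStream) {
      try (FSDataInputStream in = new FSDataInputStream(
          new BufferedFSInputStream((FSInputStream) raw.getWrappedStream(), bufferSize))) {
        System.out.println("First byte: " + in.read());
      }
    } else {
      // Already buffered or not an FSInputStream; use the stream as-is.
      raw.close();
    }
  }
}

The instanceof check matters because FSDataInputStream only accepts streams that are Seekable and PositionedReadable; BufferedFSInputStream satisfies both, but it can only buffer another FSInputStream.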
Example #1
Source File: NativeAzureFileSystem.java From hadoop with Apache License 2.0

@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Opening file: " + f.toString());
  }

  Path absolutePath = makeAbsolute(f);
  String key = pathToKey(absolutePath);
  FileMetadata meta = store.retrieveMetadata(key);
  if (meta == null) {
    throw new FileNotFoundException(f.toString());
  }
  if (meta.isDir()) {
    throw new FileNotFoundException(f.toString() + " is a directory not a file.");
  }
  return new FSDataInputStream(new BufferedFSInputStream(
      new NativeAzureFsInputStream(store.retrieve(key), key, meta.getLength()), bufferSize));
}
Example #2
Source File: NativeAzureFileSystem.java From big-c with Apache License 2.0

@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Opening file: " + f.toString());
  }

  Path absolutePath = makeAbsolute(f);
  String key = pathToKey(absolutePath);
  FileMetadata meta = store.retrieveMetadata(key);
  if (meta == null) {
    throw new FileNotFoundException(f.toString());
  }
  if (meta.isDir()) {
    throw new FileNotFoundException(f.toString() + " is a directory not a file.");
  }
  return new FSDataInputStream(new BufferedFSInputStream(
      new NativeAzureFsInputStream(store.retrieve(key), key, meta.getLength()), bufferSize));
}
Example #3
Source File: DLFileSystem.java From distributedlog with Apache License 2.0

@Override
public FSDataInputStream open(Path path, int bufferSize) throws IOException {
  try {
    DistributedLogManager dlm = namespace.openLog(getStreamName(path));
    LogReader reader;
    try {
      reader = dlm.openLogReader(DLSN.InitialDLSN);
    } catch (LogNotFoundException lnfe) {
      throw new FileNotFoundException(path.toString());
    } catch (LogEmptyException lee) {
      throw new FileNotFoundException(path.toString());
    }
    return new FSDataInputStream(
        new BufferedFSInputStream(
            new DLInputStream(dlm, reader, 0L), bufferSize));
  } catch (LogNotFoundException e) {
    throw new FileNotFoundException(path.toString());
  }
}
Example #4
Source File: HoodieLogFileReader.java From hudi with Apache License 2.0

public HoodieLogFileReader(FileSystem fs, HoodieLogFile logFile, Schema readerSchema, int bufferSize,
                           boolean readBlockLazily, boolean reverseReader) throws IOException {
  FSDataInputStream fsDataInputStream = fs.open(logFile.getPath(), bufferSize);
  if (fsDataInputStream.getWrappedStream() instanceof FSInputStream) {
    this.inputStream = new FSDataInputStream(
        new BufferedFSInputStream((FSInputStream) fsDataInputStream.getWrappedStream(), bufferSize));
  } else {
    // fsDataInputStream.getWrappedStream() may already be a BufferedFSInputStream;
    // would it need to be wrapped in another BufferedFSInputStream to make bufferSize take effect?
    this.inputStream = fsDataInputStream;
  }
  this.logFile = logFile;
  this.readerSchema = readerSchema;
  this.readBlockLazily = readBlockLazily;
  this.reverseReader = reverseReader;
  if (this.reverseReader) {
    this.reverseLogFilePosition = this.lastReverseLogFilePosition = fs.getFileStatus(logFile.getPath()).getLen();
  }
  addShutDownHook();
}
Example #5
Source File: PrestoS3FileSystem.java From presto with Apache License 2.0

@Override
public FSDataInputStream open(Path path, int bufferSize) {
  return new FSDataInputStream(
      new BufferedFSInputStream(
          new PrestoS3InputStream(s3, getBucketName(uri), path, requesterPaysEnabled,
              maxAttempts, maxBackoffTime, maxRetryTime),
          bufferSize));
}
Example #6
Source File: NativeS3FileSystem.java From hadoop with Apache License 2.0

@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
  FileStatus fs = getFileStatus(f); // will throw if the file doesn't exist
  if (fs.isDirectory()) {
    throw new FileNotFoundException("'" + f + "' is a directory");
  }
  LOG.info("Opening '" + f + "' for reading");
  Path absolutePath = makeAbsolute(f);
  String key = pathToKey(absolutePath);
  return new FSDataInputStream(new BufferedFSInputStream(
      new NativeS3FsInputStream(store, statistics, store.retrieve(key), key), bufferSize));
}
Example #7
Source File: CachingFileSystem.java From rubix with Apache License 2.0

@Override
public FSDataInputStream open(Path path, int bufferSize) throws IOException {
  if (skipCache(path.toString(), getConf())) {
    return fs.open(path, bufferSize);
  }
  Path originalPath = new Path(getOriginalURI(path.toUri()).getScheme(),
      path.toUri().getAuthority(), path.toUri().getPath());
  if (CacheConfig.isDummyModeEnabled(this.getConf())) {
    return new FSDataInputStream(
        new BufferedFSInputStream(
            new DummyModeCachingInputStream(this, originalPath, this.getConf(), statsMBean,
                clusterManager.getClusterType(), bookKeeperFactory, fs, bufferSize, statistics),
            CacheConfig.getBlockSize(getConf())));
  }
  try {
    return new FSDataInputStream(
        new BufferedFSInputStream(
            new CachingInputStream(originalPath, this.getConf(), statsMBean,
                clusterManager.getClusterType(), bookKeeperFactory, fs, bufferSize, statistics),
            CacheConfig.getBlockSize(getConf())));
  } catch (Exception e) {
    if (CacheConfig.isStrictMode(this.getConf())) {
      log.error("Error in opening Caching Input Stream", e);
      throw e;
    }
    log.warn("Error in opening Caching Input Stream, skipping cache", e);
    return fs.open(path, bufferSize);
  }
}
Example #8
Source File: NativeS3FileSystem.java From big-c with Apache License 2.0

@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
  FileStatus fs = getFileStatus(f); // will throw if the file doesn't exist
  if (fs.isDirectory()) {
    throw new FileNotFoundException("'" + f + "' is a directory");
  }
  LOG.info("Opening '" + f + "' for reading");
  Path absolutePath = makeAbsolute(f);
  String key = pathToKey(absolutePath);
  return new FSDataInputStream(new BufferedFSInputStream(
      new NativeS3FsInputStream(store, statistics, store.retrieve(key), key), bufferSize));
}
Example #9
Source File: SftpLightWeightFileSystem.java From incubator-gobblin with Apache License 2.0

@Override
public FSDataInputStream open(Path path, int bufferSize) throws IOException {
  SftpGetMonitor monitor = new SftpGetMonitor();
  try {
    ChannelSftp channelSftp = this.fsHelper.getSftpChannel();
    InputStream is = channelSftp.get(HadoopUtils.toUriPath(path), monitor);
    return new FSDataInputStream(
        new BufferedFSInputStream(new SftpFsHelper.SftpFsFileInputStream(is, channelSftp), bufferSize));
  } catch (SftpException e) {
    throw new IOException(e);
  }
}
Example #10
Source File: NativeS3FileSystem.java From RDFS with Apache License 2.0

@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
  if (!exists(f)) {
    throw new FileNotFoundException(f.toString());
  }
  Path absolutePath = makeAbsolute(f);
  String key = pathToKey(absolutePath);
  return new FSDataInputStream(new BufferedFSInputStream(
      new NativeS3FsInputStream(store.retrieve(key), key), bufferSize));
}
Example #11
Source File: NativeS3FileSystem.java From hadoop-gpu with Apache License 2.0

@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
  if (!exists(f)) {
    throw new FileNotFoundException(f.toString());
  }
  Path absolutePath = makeAbsolute(f);
  String key = pathToKey(absolutePath);
  return new FSDataInputStream(new BufferedFSInputStream(
      new NativeS3FsInputStream(store.retrieve(key), key), bufferSize));
}
Example #12
Source File: LustreFileSystem.java From hadoop-gpu with Apache License 2.0

@Override
public FSDataInputStream open(Path path, int bufferSize) throws IOException {
  if (!exists(path)) {
    throw new FileNotFoundException(path.toString());
  }
  return new FSDataInputStream(new BufferedFSInputStream(
      new LFSInputStream(path), bufferSize));
}