Java Code Examples for org.apache.hadoop.hdfs.client.HdfsDataInputStream#close()
The following examples show how to use
org.apache.hadoop.hdfs.client.HdfsDataInputStream#close().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
Source File: TestReadWhileWriting.java From hadoop with Apache License 2.0 | 6 votes |
static void checkFile(Path p, int expectedsize, final Configuration conf ) throws IOException, InterruptedException { //open the file with another user account final String username = UserGroupInformation.getCurrentUser().getShortUserName() + "_" + ++userCount; UserGroupInformation ugi = UserGroupInformation.createUserForTesting(username, new String[] {"supergroup"}); final FileSystem fs = DFSTestUtil.getFileSystemAs(ugi, conf); final HdfsDataInputStream in = (HdfsDataInputStream)fs.open(p); //Check visible length Assert.assertTrue(in.getVisibleLength() >= expectedsize); //Able to read? for(int i = 0; i < expectedsize; i++) { Assert.assertEquals((byte)i, (byte)in.read()); } in.close(); }
Example 2
Source File: TestReadWhileWriting.java From big-c with Apache License 2.0 | 6 votes |
static void checkFile(Path p, int expectedsize, final Configuration conf ) throws IOException, InterruptedException { //open the file with another user account final String username = UserGroupInformation.getCurrentUser().getShortUserName() + "_" + ++userCount; UserGroupInformation ugi = UserGroupInformation.createUserForTesting(username, new String[] {"supergroup"}); final FileSystem fs = DFSTestUtil.getFileSystemAs(ugi, conf); final HdfsDataInputStream in = (HdfsDataInputStream)fs.open(p); //Check visible length Assert.assertTrue(in.getVisibleLength() >= expectedsize); //Able to read? for(int i = 0; i < expectedsize; i++) { Assert.assertEquals((byte)i, (byte)in.read()); } in.close(); }
Example 3
Source File: DFSTestUtil.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Reads a single byte from {@code path} and returns the block that byte
 * belongs to — i.e. the first block of the file.
 *
 * @param fs   filesystem holding the file
 * @param path file whose first block is wanted
 * @return the first {@link ExtendedBlock} of the file
 * @throws IOException on filesystem errors
 */
public static ExtendedBlock getFirstBlock(FileSystem fs, Path path)
    throws IOException {
  // try-with-resources closes the stream on every exit path, exactly as the
  // original try/finally did.
  try (HdfsDataInputStream in = (HdfsDataInputStream) fs.open(path)) {
    in.readByte();
    return in.getCurrentBlock();
  }
}
Example 4
Source File: DFSTestUtil.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Reads a single byte from {@code path} and returns the block that byte
 * belongs to — i.e. the first block of the file.
 *
 * @param fs   filesystem holding the file
 * @param path file whose first block is wanted
 * @return the first {@link ExtendedBlock} of the file
 * @throws IOException on filesystem errors
 */
public static ExtendedBlock getFirstBlock(FileSystem fs, Path path)
    throws IOException {
  // try-with-resources closes the stream on every exit path, exactly as the
  // original try/finally did.
  try (HdfsDataInputStream in = (HdfsDataInputStream) fs.open(path)) {
    in.readByte();
    return in.getCurrentBlock();
  }
}
Example 5
Source File: TestShortCircuitLocalRead.java From hadoop with Apache License 2.0 | 4 votes |
/** Check the file content, reading as user {@code readingUser} */ static void checkFileContentDirect(URI uri, Path name, byte[] expected, int readOffset, String readingUser, Configuration conf, boolean legacyShortCircuitFails) throws IOException, InterruptedException { // Ensure short circuit is enabled DistributedFileSystem fs = getFileSystem(readingUser, uri, conf); ClientContext clientContext = ClientContext.getFromConf(conf); if (legacyShortCircuitFails) { assertTrue(clientContext.getDisableLegacyBlockReaderLocal()); } HdfsDataInputStream stm = (HdfsDataInputStream)fs.open(name); ByteBuffer actual = ByteBuffer.allocateDirect(expected.length - readOffset); IOUtils.skipFully(stm, readOffset); actual.limit(3); //Read a small number of bytes first. int nread = stm.read(actual); actual.limit(nread + 2); nread += stm.read(actual); // Read across chunk boundary actual.limit(Math.min(actual.capacity(), nread + 517)); nread += stm.read(actual); checkData(arrayFromByteBuffer(actual), readOffset, expected, nread, "A few bytes"); //Now read rest of it actual.limit(actual.capacity()); while (actual.hasRemaining()) { int nbytes = stm.read(actual); if (nbytes < 0) { throw new EOFException("End of file reached before reading fully."); } nread += nbytes; } checkData(arrayFromByteBuffer(actual), readOffset, expected, "Read 3"); if (legacyShortCircuitFails) { assertTrue(clientContext.getDisableLegacyBlockReaderLocal()); } stm.close(); }
Example 6
Source File: TestShortCircuitLocalRead.java From big-c with Apache License 2.0 | 4 votes |
/** Check the file content, reading as user {@code readingUser} */ static void checkFileContentDirect(URI uri, Path name, byte[] expected, int readOffset, String readingUser, Configuration conf, boolean legacyShortCircuitFails) throws IOException, InterruptedException { // Ensure short circuit is enabled DistributedFileSystem fs = getFileSystem(readingUser, uri, conf); ClientContext clientContext = ClientContext.getFromConf(conf); if (legacyShortCircuitFails) { assertTrue(clientContext.getDisableLegacyBlockReaderLocal()); } HdfsDataInputStream stm = (HdfsDataInputStream)fs.open(name); ByteBuffer actual = ByteBuffer.allocateDirect(expected.length - readOffset); IOUtils.skipFully(stm, readOffset); actual.limit(3); //Read a small number of bytes first. int nread = stm.read(actual); actual.limit(nread + 2); nread += stm.read(actual); // Read across chunk boundary actual.limit(Math.min(actual.capacity(), nread + 517)); nread += stm.read(actual); checkData(arrayFromByteBuffer(actual), readOffset, expected, nread, "A few bytes"); //Now read rest of it actual.limit(actual.capacity()); while (actual.hasRemaining()) { int nbytes = stm.read(actual); if (nbytes < 0) { throw new EOFException("End of file reached before reading fully."); } nread += nbytes; } checkData(arrayFromByteBuffer(actual), readOffset, expected, "Read 3"); if (legacyShortCircuitFails) { assertTrue(clientContext.getDisableLegacyBlockReaderLocal()); } stm.close(); }