Java Code Examples for org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner#atEnd()
The following examples show how to use org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner#atEnd().
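Scanner#atEnd() returns true once the scanner's cursor has moved past the last entry in its scan range, so it is the usual guard before reading scanner.entry(). As a minimal sketch of the idiom (the Configuration and Path are assumed to be supplied by the caller; this snippet is illustrative and not taken from any of the projects below), a full forward scan over a TFile can look like this:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.file.tfile.TFile;

public class TFileScanSketch {
  // Scan every key/value pair in a TFile, stopping when atEnd() turns true.
  public static void scanAll(Configuration conf, Path path) throws IOException {
    FileSystem fs = path.getFileSystem(conf);
    FSDataInputStream in = fs.open(path);
    TFile.Reader reader =
        new TFile.Reader(in, fs.getFileStatus(path).getLen(), conf);
    TFile.Reader.Scanner scanner = reader.createScanner();
    try {
      while (!scanner.atEnd()) {
        TFile.Reader.Scanner.Entry entry = scanner.entry();
        byte[] key = new byte[entry.getKeyLength()];
        // getValueLength() assumes the value length is known (i.e. the value
        // fits in a single chunk), as in the test examples below.
        byte[] value = new byte[entry.getValueLength()];
        entry.getKey(key);
        entry.getValue(value);
        // ... process key and value here ...
        scanner.advance(); // move the cursor to the next entry
      }
    } finally {
      scanner.close();
      reader.close();
      in.close();
    }
  }
}

The examples below use the same check in two ways: after advance(), to decide whether a following entry exists, and after lowerBound(), to tell a successful seek from a miss.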
Example 1
Source File: TestTFileByteArrays.java From hadoop with Apache License 2.0
private void readValueWithoutKey(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(),
          null);

  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
Example 2
Source File: TestTFileByteArrays.java From big-c with Apache License 2.0
private void readValueWithoutKey(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(),
          null);

  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
Example 3
Source File: TestTFileByteArrays.java From RDFS with Apache License 2.0
private void readValueWithoutKey(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(),
          null);

  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
Example 4
Source File: TestTFileByteArrays.java From hadoop-gpu with Apache License 2.0
private void readValueWithoutKey(int count, int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);

  Scanner scanner =
      reader.createScanner(composeSortedKey(KEY, count, recordIndex)
          .getBytes(), null);

  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);

  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
  }

  scanner.close();
  reader.close();
}
Example 5
Source File: TestTFileSeek.java From hadoop with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader = new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    } else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(),
      NanoTimer.nanoTimeToString(timer.read() / options.seekCount),
      options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));
}
Example 6
Source File: TestTFileByteArrays.java From hadoop with Apache License 2.0
private void readKeyWithoutValue(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(),
          null);
  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1),
        composeSortedKey(KEY, recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2),
          composeSortedKey(KEY, recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
Example 7
Source File: TestTFileSeek.java From big-c with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader = new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    } else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(),
      NanoTimer.nanoTimeToString(timer.read() / options.seekCount),
      options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));
}
Example 8
Source File: TestTFileByteArrays.java From big-c with Apache License 2.0
private void readKeyWithoutValue(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(),
          null);
  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1),
        composeSortedKey(KEY, recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2),
          composeSortedKey(KEY, recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
Example 9
Source File: TestTFileSeek.java From RDFS with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader = new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    } else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(),
      NanoTimer.nanoTimeToString(timer.read() / options.seekCount),
      options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));
}
Example 10
Source File: TestTFileByteArrays.java From RDFS with Apache License 2.0
private void readKeyWithoutValue(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(),
          null);
  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1),
        composeSortedKey(KEY, recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2),
          composeSortedKey(KEY, recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
Example 11
Source File: TestTFileSeek.java From hadoop-gpu with Apache License 2.0
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader = new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
          keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.get(), 0, key.getSize());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getSize();
      totalBytes += val.getSize();
    } else {
      ++miss;
    }
  }
  timer.stop();
  double duration = (double) timer.read() / 1000; // in us.
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(),
      NanoTimer.nanoTimeToString(timer.read() / options.seekCount),
      options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));
}
Example 12
Source File: TestTFileByteArrays.java From hadoop-gpu with Apache License 2.0
private void readKeyWithoutValue(int count, int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScanner(composeSortedKey(KEY, count, recordIndex)
          .getBytes(), null);
  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1),
        composeSortedKey(KEY, count, recordIndex));

    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2),
          composeSortedKey(KEY, count, recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}