org.apache.hadoop.hbase.io.util.LRUDictionary Java Examples
The following examples show how to use org.apache.hadoop.hbase.io.util.LRUDictionary.
The examples are drawn from the Apache HBase and Apache Phoenix projects; the original source file and license are noted above each example.
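Before the project examples, it may help to see the class on its own. The sketch below is illustrative and not taken from either project: the Dictionary interface methods (init, findEntry, getEntry, clear, NOT_IN_DICTIONARY) match the HBase source, but the demo class name is invented for this page.

```java
import org.apache.hadoop.hbase.io.util.Dictionary;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.util.Bytes;

public class LRUDictionaryDemo {
  public static void main(String[] args) {
    // CompressionContext normally instantiates the dictionary reflectively;
    // here we drive it directly through the Dictionary interface.
    Dictionary dict = new LRUDictionary();
    dict.init(Short.MAX_VALUE); // capacity; older entries are evicted LRU-first

    byte[] family = Bytes.toBytes("info");

    // First lookup misses and implicitly adds the entry.
    short first = dict.findEntry(family, 0, family.length);
    System.out.println(first == Dictionary.NOT_IN_DICTIONARY); // true

    // Second lookup hits; the short index can now stand in for the bytes.
    short second = dict.findEntry(family, 0, family.length);
    System.out.println(Bytes.toString(dict.getEntry(second))); // info

    dict.clear(); // reset, as the examples below do between write and read phases
  }
}
```

This miss-then-add behavior is what makes the dictionary useful for WAL and tag compression: repeated byte sequences shrink to a two-byte index after their first occurrence.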
Example #1
Source File: ReaderBase.java From hbase with Apache License 2.0
```java
@Override
public void init(FileSystem fs, Path path, Configuration conf, FSDataInputStream stream)
    throws IOException {
  this.conf = conf;
  this.path = path;
  this.fs = fs;
  this.fileLength = this.fs.getFileStatus(path).getLen();
  String cellCodecClsName = initReader(stream);

  boolean compression = hasCompression();
  if (compression) {
    // If compression is enabled, new dictionaries are created here.
    try {
      if (compressionContext == null) {
        compressionContext = new CompressionContext(LRUDictionary.class,
          CommonFSUtils.isRecoveredEdits(path), hasTagCompression());
      } else {
        compressionContext.clear();
      }
    } catch (Exception e) {
      throw new IOException("Failed to initialize CompressionContext", e);
    }
  }
  initAfterCompression(cellCodecClsName);
}
```
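This reader-side setup mirrors the writer-side initializeCompressionContext in Example #7: both ends construct a CompressionContext over LRUDictionary with the same recovered-edits and tag-compression flags, because reader and writer each rebuild their dictionaries independently from the byte stream and must stay in lockstep.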
Example #2
Source File: TestTagCompressionContext.java From hbase with Apache License 2.0
```java
@Test
public void testCompressUncompressTags1() throws Exception {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  TagCompressionContext context =
    new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);

  // Write phase: compress the tags of two cells into one stream.
  KeyValue kv1 = createKVWithTags(2);
  int tagsLength1 = kv1.getTagsLength();
  ByteBuffer ib = ByteBuffer.wrap(kv1.getTagsArray());
  context.compressTags(baos, ib, kv1.getTagsOffset(), tagsLength1);
  KeyValue kv2 = createKVWithTags(3);
  int tagsLength2 = kv2.getTagsLength();
  ib = ByteBuffer.wrap(kv2.getTagsArray());
  context.compressTags(baos, ib, kv2.getTagsOffset(), tagsLength2);

  // Reset the dictionary before the read phase.
  context.clear();

  byte[] dest = new byte[tagsLength1];
  ByteBuffer ob = ByteBuffer.wrap(baos.toByteArray());
  context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength1);
  assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1,
    dest, 0, tagsLength1));
  dest = new byte[tagsLength2];
  context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength2);
  assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2,
    dest, 0, tagsLength2));
}
```
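Examples #2 through #5 all follow this round-trip pattern: compress the tags of two cells, call context.clear() to reset the LRU dictionary to the empty state a fresh reader would start from, then uncompress in the same order. The ordering matters because compressor and decompressor each rebuild identical dictionary entries as they go.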
Example #3
Source File: TestTagCompressionContext.java From hbase with Apache License 2.0
```java
@Test
public void testCompressUncompressTagsWithOffheapKeyValue1() throws Exception {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream daos = new ByteBufferWriterDataOutputStream(baos);
  TagCompressionContext context =
    new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  ByteBufferExtendedCell kv1 = (ByteBufferExtendedCell) createOffheapKVWithTags(2);
  int tagsLength1 = kv1.getTagsLength();
  context.compressTags(daos, kv1.getTagsByteBuffer(), kv1.getTagsPosition(), tagsLength1);
  ByteBufferExtendedCell kv2 = (ByteBufferExtendedCell) createOffheapKVWithTags(3);
  int tagsLength2 = kv2.getTagsLength();
  context.compressTags(daos, kv2.getTagsByteBuffer(), kv2.getTagsPosition(), tagsLength2);
  context.clear();

  byte[] dest = new byte[tagsLength1];
  ByteBuffer ob = ByteBuffer.wrap(baos.getBuffer());
  context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength1);
  assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1,
    dest, 0, tagsLength1));
  dest = new byte[tagsLength2];
  context.uncompressTags(new SingleByteBuff(ob), dest, 0, tagsLength2);
  assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2,
    dest, 0, tagsLength2));
}
```
Example #4
Source File: TestTagCompressionContext.java From hbase with Apache License 2.0
```java
@Test
public void testCompressUncompressTags2() throws Exception {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  TagCompressionContext context =
    new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  KeyValue kv1 = createKVWithTags(1);
  int tagsLength1 = kv1.getTagsLength();
  context.compressTags(baos, kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1);
  KeyValue kv2 = createKVWithTags(3);
  int tagsLength2 = kv2.getTagsLength();
  context.compressTags(baos, kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2);
  context.clear();

  ByteArrayInputStream bais = new ByteArrayInputStream(baos.getBuffer());
  byte[] dest = new byte[tagsLength1];
  context.uncompressTags(bais, dest, 0, tagsLength1);
  assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1,
    dest, 0, tagsLength1));
  dest = new byte[tagsLength2];
  context.uncompressTags(bais, dest, 0, tagsLength2);
  assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2,
    dest, 0, tagsLength2));
}
```
Example #5
Source File: TestTagCompressionContext.java From hbase with Apache License 2.0
```java
@Test
public void testCompressUncompressTagsWithOffheapKeyValue2() throws Exception {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream daos = new ByteBufferWriterDataOutputStream(baos);
  TagCompressionContext context =
    new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  ByteBufferExtendedCell kv1 = (ByteBufferExtendedCell) createOffheapKVWithTags(1);
  int tagsLength1 = kv1.getTagsLength();
  context.compressTags(daos, kv1.getTagsByteBuffer(), kv1.getTagsPosition(), tagsLength1);
  ByteBufferExtendedCell kv2 = (ByteBufferExtendedCell) createOffheapKVWithTags(3);
  int tagsLength2 = kv2.getTagsLength();
  context.compressTags(daos, kv2.getTagsByteBuffer(), kv2.getTagsPosition(), tagsLength2);
  context.clear();

  ByteArrayInputStream bais = new ByteArrayInputStream(baos.getBuffer());
  byte[] dest = new byte[tagsLength1];
  context.uncompressTags(bais, dest, 0, tagsLength1);
  assertTrue(Bytes.equals(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1,
    dest, 0, tagsLength1));
  dest = new byte[tagsLength2];
  context.uncompressTags(bais, dest, 0, tagsLength2);
  assertTrue(Bytes.equals(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2,
    dest, 0, tagsLength2));
}
```
Example #6
Source File: ReadWriteKeyValuesWithCodecIT.java From phoenix with Apache License 2.0
```java
@Test
public void testWithCompression() throws Exception {
  // get the FS ready to read/write the edit
  Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
  Path testFile = new Path(testDir, "testfile");
  FileSystem fs = UTIL.getTestFileSystem();

  List<WALEdit> edits = getEdits();
  CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
  writeReadAndVerify(compression, fs, edits, testFile);
}
```
Example #7
Source File: AbstractProtobufLogWriter.java From hbase with Apache License 2.0
```java
private boolean initializeCompressionContext(Configuration conf, Path path) throws IOException {
  boolean doCompress = conf.getBoolean(HConstants.ENABLE_WAL_COMPRESSION, false);
  if (doCompress) {
    try {
      this.compressionContext = new CompressionContext(LRUDictionary.class,
        CommonFSUtils.isRecoveredEdits(path),
        conf.getBoolean(CompressionContext.ENABLE_WAL_TAGS_COMPRESSION, true));
    } catch (Exception e) {
      throw new IOException("Failed to initiate CompressionContext", e);
    }
  }
  return doCompress;
}
```
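Example #7 shows where the feature flags are read. For completeness, here is a minimal sketch of flipping those switches in configuration; it uses only the constants that appear in the example above, and the standalone class name is invented for illustration.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.wal.CompressionContext;

public class EnableWalCompressionDemo {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Off by default; this is the flag initializeCompressionContext checks.
    conf.setBoolean(HConstants.ENABLE_WAL_COMPRESSION, true);
    // Tag compression within the WAL defaults to true once WAL compression is on.
    conf.setBoolean(CompressionContext.ENABLE_WAL_TAGS_COMPRESSION, true);
    System.out.println("WAL compression enabled: "
      + conf.getBoolean(HConstants.ENABLE_WAL_COMPRESSION, false));
  }
}
```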
Example #8
Source File: BufferedDataBlockEncoder.java From hbase with Apache License 2.0
```java
@Override
public ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext blkDecodingCtx)
    throws IOException {
  if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
      + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
  }
  HFileBlockDefaultDecodingContext decodingCtx =
    (HFileBlockDefaultDecodingContext) blkDecodingCtx;
  if (decodingCtx.getHFileContext().isIncludesTags()
      && decodingCtx.getHFileContext().isCompressTags()) {
    if (decodingCtx.getTagCompressionContext() != null) {
      // It will be overhead to create the TagCompressionContext again and again for every block
      // decoding.
      decodingCtx.getTagCompressionContext().clear();
    } else {
      try {
        TagCompressionContext tagCompressionContext =
          new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
        decodingCtx.setTagCompressionContext(tagCompressionContext);
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    }
  }
  return internalDecodeKeyValues(source, 0, 0, decodingCtx);
}
```
Example #9
Source File: BufferedDataBlockEncoder.java From hbase with Apache License 2.0
```java
public BufferedEncodedSeeker(HFileBlockDecodingContext decodingCtx) {
  super(decodingCtx);
  if (decodingCtx.getHFileContext().isCompressTags()) {
    try {
      tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
    } catch (Exception e) {
      throw new RuntimeException("Failed to initialize TagCompressionContext", e);
    }
  }
  current = createSeekerState(); // always valid
  previous = createSeekerState(); // may not be valid
}
```
Example #10
Source File: BufferedDataBlockEncoder.java From hbase with Apache License 2.0
```java
@Override
public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx, DataOutputStream out)
    throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
      + HFileBlockDefaultEncodingContext.class.getName() + " as the encoding context.");
  }
  HFileBlockDefaultEncodingContext encodingCtx =
    (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding(out);
  if (encodingCtx.getHFileContext().isIncludesTags()
      && encodingCtx.getHFileContext().isCompressTags()) {
    if (encodingCtx.getTagCompressionContext() != null) {
      // It will be overhead to create the TagCompressionContext again and again for every block
      // encoding.
      encodingCtx.getTagCompressionContext().clear();
    } else {
      try {
        TagCompressionContext tagCompressionContext =
          new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
        encodingCtx.setTagCompressionContext(tagCompressionContext);
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    }
  }
  StreamUtils.writeInt(out, 0); // DUMMY length. This will be updated in endBlockEncoding()
  blkEncodingCtx.setEncodingState(new EncodingState());
}
```
Example #11
Source File: IndexedWALEditCodecTest.java From phoenix with Apache License 2.0
@SuppressWarnings("unused") @Test public void testConstructorsArePresent() throws Exception { // "testing" via the presence of these constructors IndexedWALEditCodec codec1 = new IndexedWALEditCodec(); IndexedWALEditCodec codec2 = new IndexedWALEditCodec(new Configuration(false), new CompressionContext(LRUDictionary.class, false, false)); }
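The point of this odd-looking test: HBase instantiates WAL codecs reflectively (presumably via the (Configuration, CompressionContext) constructor lookup in WALCellCodec), so these constructors must exist at runtime even though nothing references them at compile time. The test breaks if either signature drifts.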
Example #12
Source File: ReadWriteKeyValuesWithCodecTest.java From phoenix with Apache License 2.0
```java
@Test
public void testWithCompression() throws Exception {
  // get the FS ready to read/write the edit
  Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
  Path testFile = new Path(testDir, "testfile");
  FileSystem fs = UTIL.getTestFileSystem();

  List<WALEdit> edits = getEdits();
  CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
  writeReadAndVerify(compression, fs, edits, testFile);
}
```