org.apache.hadoop.io.compress.snappy.SnappyDecompressor Java Examples
The following examples show how to use
org.apache.hadoop.io.compress.snappy.SnappyDecompressor.
The project, source file, and license that each example comes from are identified above the example.
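Most of the examples below reach SnappyDecompressor indirectly, through SnappyCodec. As orientation, here is a minimal sketch of that path: configuring the codec and reading a Snappy-compressed stream. It is illustrative only, not taken from any of the projects below; it assumes hadoop-common is on the classpath and the native snappy library is loaded (Examples #3 and #4 show how that precondition is enforced), and the file name is hypothetical.

import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.SnappyCodec;

public class SnappyReadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    SnappyCodec codec = new SnappyCodec();
    codec.setConf(conf); // the codec reads its buffer size from the Configuration

    // createInputStream() wires a SnappyDecompressor into the stream internally.
    try (InputStream raw = new FileInputStream("data.snappy"); // hypothetical file
         CompressionInputStream in = codec.createInputStream(raw)) {
      byte[] buf = new byte[4096];
      int n;
      while ((n = in.read(buf)) != -1) {
        // process n decompressed bytes from buf
      }
    }
  }
}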
Example #1
Source File: TestCompressorDecompressor.java From hadoop with Apache License 2.0
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(
            CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Example #2
Source File: TestCompressorDecompressor.java From hadoop with Apache License 2.0
@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
  int BYTE_SIZE = 100 * 1024;
  byte[] rawData = generate(BYTE_SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(
            new SnappyCompressor(BYTE_SIZE + BYTE_SIZE / 2),
            new SnappyDecompressor(BYTE_SIZE + BYTE_SIZE / 2))
        .withCompressDecompressPair(new Lz4Compressor(BYTE_SIZE), new Lz4Decompressor(BYTE_SIZE))
        .withTestCases(ImmutableSet.of(
            CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressorWithExeedBufferLimit error !!!" + ex);
  }
}
Example #3
Source File: SnappyCodec.java From big-c with Apache License 2.0
/**
 * Are the native snappy libraries loaded & initialized?
 */
public static void checkNativeCodeLoaded() {
  if (!NativeCodeLoader.isNativeCodeLoaded() ||
      !NativeCodeLoader.buildSupportsSnappy()) {
    throw new RuntimeException("native snappy library not available: " +
        "this version of libhadoop was built without " +
        "snappy support.");
  }
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyCompressor has not been loaded.");
  }
  if (!SnappyDecompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyDecompressor has not been loaded.");
  }
}
Example #4
Source File: SnappyCodec.java From hadoop with Apache License 2.0
/**
 * Are the native snappy libraries loaded & initialized?
 */
public static void checkNativeCodeLoaded() {
  if (!NativeCodeLoader.isNativeCodeLoaded() ||
      !NativeCodeLoader.buildSupportsSnappy()) {
    throw new RuntimeException("native snappy library not available: " +
        "this version of libhadoop was built without " +
        "snappy support.");
  }
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyCompressor has not been loaded.");
  }
  if (!SnappyDecompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: " +
        "SnappyDecompressor has not been loaded.");
  }
}
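Because checkNativeCodeLoaded() signals failure by throwing a RuntimeException rather than returning a status, an application that wants to fail fast can simply call it once during startup. A minimal sketch, not taken from the projects above:

// Fail fast before any Snappy I/O is attempted: throws RuntimeException with a
// diagnostic message if libhadoop was built without snappy support, or if the
// compressor/decompressor native bindings failed to load.
SnappyCodec.checkNativeCodeLoaded();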
Example #5
Source File: TestCompressorDecompressor.java From big-c with Apache License 2.0
@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
  int BYTE_SIZE = 100 * 1024;
  byte[] rawData = generate(BYTE_SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(
            new SnappyCompressor(BYTE_SIZE + BYTE_SIZE / 2),
            new SnappyDecompressor(BYTE_SIZE + BYTE_SIZE / 2))
        .withCompressDecompressPair(new Lz4Compressor(BYTE_SIZE), new Lz4Decompressor(BYTE_SIZE))
        .withTestCases(ImmutableSet.of(
            CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressorWithExeedBufferLimit error !!!" + ex);
  }
}
Example #6
Source File: TestCompressorDecompressor.java From big-c with Apache License 2.0
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(
            CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Example #7
Source File: SnappyCodec.java From RDFS with Apache License 2.0
/**
 * Create a new {@link Decompressor} for use by this
 * {@link CompressionCodec}.
 *
 * @return a new decompressor for use by this codec
 */
@Override
public Decompressor createDecompressor() {
  if (!isNativeSnappyLoaded(conf)) {
    throw new RuntimeException("native snappy library not available");
  }
  int bufferSize = conf.getInt(
      IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(bufferSize);
}
Example #8
Source File: SnappyCodec.java From RDFS with Apache License 2.0
/**
 * Get the type of {@link Decompressor} needed by this
 * {@link CompressionCodec}.
 *
 * @return the type of decompressor needed by this codec.
 */
@Override
public Class<? extends Decompressor> getDecompressorType() {
  if (!isNativeSnappyLoaded(conf)) {
    throw new RuntimeException("native snappy library not available");
  }
  return SnappyDecompressor.class;
}
Example #9
Source File: SnappyCodec.java From hadoop with Apache License 2.0
/**
 * Create a new {@link Decompressor} for use by this {@link CompressionCodec}.
 *
 * @return a new decompressor for use by this codec
 */
@Override
public Decompressor createDecompressor() {
  checkNativeCodeLoaded();
  int bufferSize = conf.getInt(
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(bufferSize);
}
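Because the buffer size is read from the Configuration, it can be tuned without code changes. A hedged sketch; the key string below matches what CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY resolves to in Hadoop, and the 128 KB value is an arbitrary illustration:

Configuration conf = new Configuration();
// Override the default Snappy buffer size before the codec creates its
// compressors/decompressors.
conf.setInt("io.compression.codec.snappy.buffersize", 128 * 1024);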
Example #10
Source File: SnappyCodec.java From big-c with Apache License 2.0
/**
 * Create a new {@link Decompressor} for use by this {@link CompressionCodec}.
 *
 * @return a new decompressor for use by this codec
 */
@Override
public Decompressor createDecompressor() {
  checkNativeCodeLoaded();
  int bufferSize = conf.getInt(
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
      CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
  return new SnappyDecompressor(bufferSize);
}
Example #11
Source File: SnappyCodec.java From big-c with Apache License 2.0
public static boolean isNativeCodeLoaded() {
  return SnappyCompressor.isNativeCodeLoaded() &&
      SnappyDecompressor.isNativeCodeLoaded();
}
Example #12
Source File: SnappyCodec.java From datacollector with Apache License 2.0
@Override
public Decompressor createDecompressor() {
  return new SnappyDecompressor(getBufferSize());
}
Example #13
Source File: SnappyCodec.java From datacollector with Apache License 2.0
public SnappyCodec() {
  super(SnappyCompressor.class, SnappyDecompressor.class, ".snappy");
}
Example #14
Source File: SnappyCodec.java From big-c with Apache License 2.0
/**
 * Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
 *
 * @return the type of decompressor needed by this codec.
 */
@Override
public Class<? extends Decompressor> getDecompressorType() {
  checkNativeCodeLoaded();
  return SnappyDecompressor.class;
}
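getDecompressorType() matters mostly to CodecPool, which caches reusable decompressor instances keyed by this class. A typical borrow/return pattern looks like the sketch below (illustrative, not from the project above; codec is assumed to be an already-configured SnappyCodec):

import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.Decompressor;

Decompressor decompressor = CodecPool.getDecompressor(codec);
try {
  // use the decompressor, e.g. via codec.createInputStream(rawStream, decompressor)
} finally {
  CodecPool.returnDecompressor(decompressor); // return it to the pool for reuse
}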
Example #15
Source File: ScanWithHiveReader.java From dremio-oss with Apache License 2.0
private static Class<? extends HiveAbstractReader> getNativeReaderClass(Optional<String> formatName,
    OptionManager options, Configuration configuration, boolean mixedSchema, boolean isTransactional) {
  if (!formatName.isPresent()) {
    return HiveDefaultReader.class;
  }

  Class<? extends HiveAbstractReader> readerClass = readerMap.get(formatName.get());
  if (readerClass == HiveOrcReader.class) {
    // Validate reader
    if (OrcConf.USE_ZEROCOPY.getBoolean(configuration)) {
      if (!NativeCodeLoader.isNativeCodeLoaded()) {
        throw UserException.dataReadError()
            .message("Hadoop native library is required for Hive ORC data, but is not loaded")
            .build(logger);
      }
      // TODO: find a way to access compression codec information?
      if (!SnappyDecompressor.isNativeCodeLoaded()) {
        throw UserException.dataReadError()
            .message("Snappy native library is required for Hive ORC data, but is not loaded")
            .build(logger);
      }
      if (!isNativeZlibLoaded) {
        throw UserException.dataReadError()
            .message("Zlib native library is required for Hive ORC data, but is not loaded")
            .build(logger);
      }
    }

    if (new HiveSettings(options).vectorizeOrcReaders() && !mixedSchema && !isTransactional) {
      // We don't use the vectorized ORC reader if there is a schema change between table and
      // partitions, or if the table is a transactional Hive table
      return HiveORCVectorizedReader.class;
    }
  }

  if (readerClass == null) {
    return HiveDefaultReader.class;
  }
  return readerClass;
}
Example #16
Source File: SnappyCodec.java From hadoop with Apache License 2.0
/**
 * Get the type of {@link Decompressor} needed by this {@link CompressionCodec}.
 *
 * @return the type of decompressor needed by this codec.
 */
@Override
public Class<? extends Decompressor> getDecompressorType() {
  checkNativeCodeLoaded();
  return SnappyDecompressor.class;
}
Example #17
Source File: SnappyCodec.java From hadoop with Apache License 2.0
public static boolean isNativeCodeLoaded() {
  return SnappyCompressor.isNativeCodeLoaded() &&
      SnappyDecompressor.isNativeCodeLoaded();
}
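Unlike checkNativeCodeLoaded(), this variant reports availability as a boolean, which suits feature detection. A brief sketch:

// Feature-detect Snappy support instead of catching a RuntimeException.
if (SnappyCodec.isNativeCodeLoaded()) {
  // safe to instantiate SnappyCompressor / SnappyDecompressor
} else {
  // fall back to a pure-Java codec, or skip Snappy-dependent work
}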