Java Code Examples for org.apache.hadoop.io.compress.snappy.SnappyDecompressor#isNativeCodeLoaded()
The following examples show how to use
org.apache.hadoop.io.compress.snappy.SnappyDecompressor#isNativeCodeLoaded().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You can also explore related API usage on the sidebar.
Example 1
Source File: SnappyCodec.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Verifies that the native snappy library and both of its JNI wrappers are
 * loaded and initialized.
 *
 * @throws RuntimeException if libhadoop lacks snappy support, or if either the
 *         compressor or decompressor native bindings failed to load
 */
public static void checkNativeCodeLoaded() {
  // libhadoop itself must be present and must have been built with snappy support.
  if (!NativeCodeLoader.isNativeCodeLoaded() || !NativeCodeLoader.buildSupportsSnappy()) {
    throw new RuntimeException("native snappy library not available: "
        + "this version of libhadoop was built without snappy support.");
  }
  // Each JNI wrapper initializes its native bindings independently; check both.
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: "
        + "SnappyCompressor has not been loaded.");
  }
  if (!SnappyDecompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: "
        + "SnappyDecompressor has not been loaded.");
  }
}
Example 2
Source File: SnappyCodec.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Ensures the native snappy stack is fully usable: libhadoop was compiled with
 * snappy support and both the compressor and decompressor JNI bindings loaded.
 *
 * @throws RuntimeException describing the first missing native component
 */
public static void checkNativeCodeLoaded() {
  // Fail fast if libhadoop is absent or was built without snappy.
  if (!NativeCodeLoader.isNativeCodeLoaded() || !NativeCodeLoader.buildSupportsSnappy()) {
    throw new RuntimeException("native snappy library not available: "
        + "this version of libhadoop was built without snappy support.");
  }
  // Compressor and decompressor bindings load separately; verify each one.
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: "
        + "SnappyCompressor has not been loaded.");
  }
  if (!SnappyDecompressor.isNativeCodeLoaded()) {
    throw new RuntimeException("native snappy library not available: "
        + "SnappyDecompressor has not been loaded.");
  }
}
Example 3
Source File: SnappyCodec.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Reports whether both the snappy compressor and decompressor native bindings
 * have been loaded.
 *
 * @return {@code true} only when both JNI wrappers are available
 */
public static boolean isNativeCodeLoaded() {
  if (!SnappyCompressor.isNativeCodeLoaded()) {
    // Short-circuit: without the compressor there is no point checking further.
    return false;
  }
  return SnappyDecompressor.isNativeCodeLoaded();
}
Example 4
Source File: ScanWithHiveReader.java From dremio-oss with Apache License 2.0 | 4 votes |
private static Class<? extends HiveAbstractReader> getNativeReaderClass(Optional<String> formatName, OptionManager options, Configuration configuration, boolean mixedSchema, boolean isTransactional) { if (!formatName.isPresent()) { return HiveDefaultReader.class; } Class<? extends HiveAbstractReader> readerClass = readerMap.get(formatName.get()); if (readerClass == HiveOrcReader.class) { // Validate reader if (OrcConf.USE_ZEROCOPY.getBoolean(configuration)) { if (!NativeCodeLoader.isNativeCodeLoaded()) { throw UserException.dataReadError() .message("Hadoop native library is required for Hive ORC data, but is not loaded").build(logger); } // TODO: find a way to access compression codec information? if (!SnappyDecompressor.isNativeCodeLoaded()) { throw UserException.dataReadError() .message("Snappy native library is required for Hive ORC data, but is not loaded").build(logger); } if (!isNativeZlibLoaded) { throw UserException .dataReadError() .message("Zlib native library is required for Hive ORC data, but is not loaded") .build(logger); } } if (new HiveSettings(options).vectorizeOrcReaders() && !mixedSchema && !isTransactional) { // We don't use vectorized ORC reader if there is a schema change between table and partitions or the table is // a transactional Hive table return HiveORCVectorizedReader.class; } } if (readerClass == null) { return HiveDefaultReader.class; } return readerClass; }
Example 5
Source File: ScanWithHiveReader.java From dremio-oss with Apache License 2.0 | 4 votes |
private static Class<? extends HiveAbstractReader> getNativeReaderClass(Optional<String> formatName, OptionManager options, Configuration configuration, boolean mixedSchema, boolean isTransactional) { if (!formatName.isPresent()) { return HiveDefaultReader.class; } Class<? extends HiveAbstractReader> readerClass = readerMap.get(formatName.get()); if (readerClass == HiveOrcReader.class) { // Validate reader if (OrcConf.USE_ZEROCOPY.getBoolean(configuration)) { if (!NativeCodeLoader.isNativeCodeLoaded()) { throw UserException.dataReadError() .message("Hadoop native library is required for Hive ORC data, but is not loaded").build(logger); } // TODO: find a way to access compression codec information? if (!SnappyDecompressor.isNativeCodeLoaded()) { throw UserException.dataReadError() .message("Snappy native library is required for Hive ORC data, but is not loaded").build(logger); } if (!isNativeZlibLoaded) { throw UserException .dataReadError() .message("Zlib native library is required for Hive ORC data, but is not loaded") .build(logger); } } if (new HiveSettings(options).vectorizeOrcReaders() && !mixedSchema && !isTransactional) { // We don't use vectorized ORC reader if there is a schema change between table and partitions or the table is // a transactional Hive table return HiveORCVectorizedReader.class; } } if (readerClass == null) { return HiveDefaultReader.class; } return readerClass; }
Example 6
Source File: SnappyCodec.java From big-c with Apache License 2.0 | 4 votes |
public static boolean isNativeCodeLoaded() { return SnappyCompressor.isNativeCodeLoaded() && SnappyDecompressor.isNativeCodeLoaded(); }