org.apache.hadoop.io.SequenceFile.CompressionType Scala Examples
The following examples show how to use org.apache.hadoop.io.SequenceFile.CompressionType.
The reference above each example points back to the original project and source file.
Example 1
Source File: CompressionCodecs.scala, from the drizzle-spark project (Apache License 2.0)
package org.apache.spark.sql.catalyst.util

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress._

import org.apache.spark.util.Utils

object CompressionCodecs {
  private val shortCompressionCodecNames = Map(
    "none" -> null,
    "uncompressed" -> null,
    "bzip2" -> classOf[BZip2Codec].getName,
    "deflate" -> classOf[DeflateCodec].getName,
    "gzip" -> classOf[GzipCodec].getName,
    "lz4" -> classOf[Lz4Codec].getName,
    "snappy" -> classOf[SnappyCodec].getName)

  def setCodecConfiguration(conf: Configuration, codec: String): Unit = {
    if (codec != null) {
      conf.set("mapreduce.output.fileoutputformat.compress", "true")
      conf.set("mapreduce.output.fileoutputformat.compress.type", CompressionType.BLOCK.toString)
      conf.set("mapreduce.output.fileoutputformat.compress.codec", codec)
      conf.set("mapreduce.map.output.compress", "true")
      conf.set("mapreduce.map.output.compress.codec", codec)
    } else {
      // This infers the option `compression` is set to `uncompressed` or `none`.
      conf.set("mapreduce.output.fileoutputformat.compress", "false")
      conf.set("mapreduce.map.output.compress", "false")
    }
  }
}
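setCodecConfiguration only mutates the Hadoop Configuration passed to it. Below is a minimal sketch of how it might be driven, assuming a plain Configuration and the Gzip codec; the SetCodecExample object and the printed properties are illustrative and not part of the original example.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.compress.GzipCodec

import org.apache.spark.sql.catalyst.util.CompressionCodecs

object SetCodecExample {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()

    // Enable block-compressed job output and compressed map output with Gzip.
    CompressionCodecs.setCodecConfiguration(conf, classOf[GzipCodec].getName)
    println(conf.get("mapreduce.output.fileoutputformat.compress.type")) // prints "BLOCK"

    // Passing null takes the else branch and turns compression back off.
    CompressionCodecs.setCodecConfiguration(conf, null)
    println(conf.get("mapreduce.output.fileoutputformat.compress")) // prints "false"
  }
}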
Example 2
Source File: CompressionCodecs.scala, from the XSQL project (Apache License 2.0)
package org.apache.spark.sql.catalyst.util

import java.util.Locale

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress._

import org.apache.spark.util.Utils

object CompressionCodecs {
  private val shortCompressionCodecNames = Map(
    "none" -> null,
    "uncompressed" -> null,
    "bzip2" -> classOf[BZip2Codec].getName,
    "deflate" -> classOf[DeflateCodec].getName,
    "gzip" -> classOf[GzipCodec].getName,
    "lz4" -> classOf[Lz4Codec].getName,
    "snappy" -> classOf[SnappyCodec].getName)

  def setCodecConfiguration(conf: Configuration, codec: String): Unit = {
    if (codec != null) {
      conf.set("mapreduce.output.fileoutputformat.compress", "true")
      conf.set("mapreduce.output.fileoutputformat.compress.type", CompressionType.BLOCK.toString)
      conf.set("mapreduce.output.fileoutputformat.compress.codec", codec)
      conf.set("mapreduce.map.output.compress", "true")
      conf.set("mapreduce.map.output.compress.codec", codec)
    } else {
      // This infers the option `compression` is set to `uncompressed` or `none`.
      conf.set("mapreduce.output.fileoutputformat.compress", "false")
      conf.set("mapreduce.map.output.compress", "false")
    }
  }
}
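Unlike Example 1, this copy also imports java.util.Locale; that import appears to back a case-insensitive lookup of the short codec names elsewhere in the file, which is not shown in this excerpt. A hypothetical sketch of such a lookup, with resolveCodecName being a made-up helper name rather than part of the original code:

import java.util.Locale

object CodecNameLookup {
  // Hypothetical helper: resolves a user-supplied name such as "GZIP" or "Snappy"
  // to a fully qualified codec class name, case-insensitively via Locale.ROOT.
  def resolveCodecName(shortNames: Map[String, String], name: String): Option[String] =
    shortNames.get(name.toLowerCase(Locale.ROOT))
}

// CodecNameLookup.resolveCodecName(shortCompressionCodecNames, "GZIP")
//   would return Some("org.apache.hadoop.io.compress.GzipCodec").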
Example 3
Source File: CompressionCodecs.scala, from the sparkoscope project (Apache License 2.0)
package org.apache.spark.sql.catalyst.util

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress._

import org.apache.spark.util.Utils

object CompressionCodecs {
  private val shortCompressionCodecNames = Map(
    "none" -> null,
    "uncompressed" -> null,
    "bzip2" -> classOf[BZip2Codec].getName,
    "deflate" -> classOf[DeflateCodec].getName,
    "gzip" -> classOf[GzipCodec].getName,
    "lz4" -> classOf[Lz4Codec].getName,
    "snappy" -> classOf[SnappyCodec].getName)

  def setCodecConfiguration(conf: Configuration, codec: String): Unit = {
    if (codec != null) {
      conf.set("mapreduce.output.fileoutputformat.compress", "true")
      conf.set("mapreduce.output.fileoutputformat.compress.type", CompressionType.BLOCK.toString)
      conf.set("mapreduce.output.fileoutputformat.compress.codec", codec)
      conf.set("mapreduce.map.output.compress", "true")
      conf.set("mapreduce.map.output.compress.codec", codec)
    } else {
      // This infers the option `compression` is set to `uncompressed` or `none`.
      conf.set("mapreduce.output.fileoutputformat.compress", "false")
      conf.set("mapreduce.map.output.compress", "false")
    }
  }
}
Example 4
Source File: CompressionCodecs.scala, from the multi-tenancy-spark project (Apache License 2.0)
package org.apache.spark.sql.catalyst.util

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress._

import org.apache.spark.util.Utils

object CompressionCodecs {
  private val shortCompressionCodecNames = Map(
    "none" -> null,
    "uncompressed" -> null,
    "bzip2" -> classOf[BZip2Codec].getName,
    "deflate" -> classOf[DeflateCodec].getName,
    "gzip" -> classOf[GzipCodec].getName,
    "lz4" -> classOf[Lz4Codec].getName,
    "snappy" -> classOf[SnappyCodec].getName)

  def setCodecConfiguration(conf: Configuration, codec: String): Unit = {
    if (codec != null) {
      conf.set("mapreduce.output.fileoutputformat.compress", "true")
      conf.set("mapreduce.output.fileoutputformat.compress.type", CompressionType.BLOCK.toString)
      conf.set("mapreduce.output.fileoutputformat.compress.codec", codec)
      conf.set("mapreduce.map.output.compress", "true")
      conf.set("mapreduce.map.output.compress.codec", codec)
    } else {
      // This infers the option `compression` is set to `uncompressed` or `none`.
      conf.set("mapreduce.output.fileoutputformat.compress", "false")
      conf.set("mapreduce.map.output.compress", "false")
    }
  }
}
Example 5
Source File: CompressionCodecs.scala, from the Spark-2.3.1 project (Apache License 2.0)
package org.apache.spark.sql.catalyst.util

import java.util.Locale

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress._

import org.apache.spark.util.Utils

object CompressionCodecs {
  private val shortCompressionCodecNames = Map(
    "none" -> null,
    "uncompressed" -> null,
    "bzip2" -> classOf[BZip2Codec].getName,
    "deflate" -> classOf[DeflateCodec].getName,
    "gzip" -> classOf[GzipCodec].getName,
    "lz4" -> classOf[Lz4Codec].getName,
    "snappy" -> classOf[SnappyCodec].getName)

  def setCodecConfiguration(conf: Configuration, codec: String): Unit = {
    if (codec != null) {
      conf.set("mapreduce.output.fileoutputformat.compress", "true")
      conf.set("mapreduce.output.fileoutputformat.compress.type", CompressionType.BLOCK.toString)
      conf.set("mapreduce.output.fileoutputformat.compress.codec", codec)
      conf.set("mapreduce.map.output.compress", "true")
      conf.set("mapreduce.map.output.compress.codec", codec)
    } else {
      // This infers the option `compression` is set to `uncompressed` or `none`.
      conf.set("mapreduce.output.fileoutputformat.compress", "false")
      conf.set("mapreduce.map.output.compress", "false")
    }
  }
}
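Every example above uses CompressionType.BLOCK only as a string value for the MapReduce output properties. For comparison, the same enum can also be passed directly to the SequenceFile writer API; below is a minimal, self-contained sketch assuming a local output path, Text key/value records, and the Deflate codec, all chosen for illustration rather than taken from the examples.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.{SequenceFile, Text}
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress.DeflateCodec
import org.apache.hadoop.util.ReflectionUtils

object BlockCompressedWriterSketch {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    // Illustrative output path; any Hadoop-accessible path would do.
    val path = new Path("/tmp/example.seq")
    // Instantiate the codec through ReflectionUtils so its Configuration is set.
    val codec = ReflectionUtils.newInstance(classOf[DeflateCodec], conf)

    // Create a block-compressed SequenceFile writer (CompressionType.BLOCK).
    val writer = SequenceFile.createWriter(
      conf,
      SequenceFile.Writer.file(path),
      SequenceFile.Writer.keyClass(classOf[Text]),
      SequenceFile.Writer.valueClass(classOf[Text]),
      SequenceFile.Writer.compression(CompressionType.BLOCK, codec))

    try {
      writer.append(new Text("key"), new Text("value"))
    } finally {
      writer.close()
    }
  }
}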