Java Code Examples for org.apache.hadoop.io.SequenceFile.CompressionType#RECORD
The following examples show how to use
org.apache.hadoop.io.SequenceFile.CompressionType#RECORD.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
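Before the project examples, here is a minimal, self-contained sketch of how CompressionType.RECORD is typically passed when creating a SequenceFile. It is not taken from any of the projects below; the output path, key/value classes, and record count are arbitrary placeholders, and it assumes a Hadoop 2.x+ client (the Writer.Option / Reader.Option API) on the classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;

public class RecordCompressionExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Arbitrary placeholder path for this sketch.
    Path file = new Path("/tmp/record-compressed.seq");

    // Write a few key/value pairs, compressing each record's value individually.
    SequenceFile.Writer writer = null;
    try {
      writer = SequenceFile.createWriter(conf,
          SequenceFile.Writer.file(file),
          SequenceFile.Writer.keyClass(IntWritable.class),
          SequenceFile.Writer.valueClass(Text.class),
          SequenceFile.Writer.compression(CompressionType.RECORD));
      for (int i = 0; i < 10; i++) {
        writer.append(new IntWritable(i), new Text("value-" + i));
      }
    } finally {
      IOUtils.closeStream(writer);
    }

    // Read the pairs back; the reader detects the compression type from the file header.
    SequenceFile.Reader reader = null;
    try {
      reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file));
      IntWritable key = new IntWritable();
      Text value = new Text();
      while (reader.next(key, value)) {
        System.out.println(key + "\t" + value);
      }
    } finally {
      IOUtils.closeStream(reader);
    }
  }
}

The examples that follow show the same constant in real project code, most often to reject record compression in SequenceFileAsBinaryOutputFormat or to exercise writer constructors in tests.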
Example 1
Source File: SequenceFileAsBinaryOutputFormat.java From hadoop with Apache License 2.0 | 5 votes
@Override
public void checkOutputSpecs(FileSystem ignored, JobConf job)
    throws IOException {
  super.checkOutputSpecs(ignored, job);
  if (getCompressOutput(job) &&
      getOutputCompressionType(job) == CompressionType.RECORD) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression");
  }
}
Example 2
Source File: SequenceFileAsBinaryOutputFormat.java From hadoop with Apache License 2.0 | 5 votes
@Override
public void checkOutputSpecs(JobContext job) throws IOException {
  super.checkOutputSpecs(job);
  if (getCompressOutput(job) &&
      getOutputCompressionType(job) == CompressionType.RECORD) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression");
  }
}
Example 3
Source File: TestArrayFile.java From hadoop with Apache License 2.0 | 5 votes
/**
 * test on {@link ArrayFile.Reader} iteration methods
 * <pre>
 * {@code next(), seek()} in and out of range.
 * </pre>
 */
public void testArrayFileIteration() {
  int SIZE = 10;
  Configuration conf = new Configuration();
  try {
    FileSystem fs = FileSystem.get(conf);
    ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE,
        LongWritable.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull("testArrayFileIteration error !!!", writer);

    for (int i = 0; i < SIZE; i++)
      writer.append(new LongWritable(i));

    writer.close();

    ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
    LongWritable nextWritable = new LongWritable(0);

    for (int i = 0; i < SIZE; i++) {
      nextWritable = (LongWritable) reader.next(nextWritable);
      assertEquals(nextWritable.get(), i);
    }

    assertTrue("testArrayFileIteration seek error !!!",
        reader.seek(new LongWritable(6)));
    nextWritable = (LongWritable) reader.next(nextWritable);
    assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
    assertTrue("testArrayFileIteration error !!!",
        nextWritable.equals(new LongWritable(7)));
    assertFalse("testArrayFileIteration error !!!",
        reader.seek(new LongWritable(SIZE + 5)));
    reader.close();
  } catch (Exception ex) {
    fail("testArrayFileWriterConstruction error !!!");
  }
}
Example 4
Source File: SequenceFileAsBinaryOutputFormat.java From big-c with Apache License 2.0 | 5 votes
@Override
public void checkOutputSpecs(FileSystem ignored, JobConf job)
    throws IOException {
  super.checkOutputSpecs(ignored, job);
  if (getCompressOutput(job) &&
      getOutputCompressionType(job) == CompressionType.RECORD) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression");
  }
}
Example 5
Source File: SequenceFileAsBinaryOutputFormat.java From big-c with Apache License 2.0 | 5 votes
@Override
public void checkOutputSpecs(JobContext job) throws IOException {
  super.checkOutputSpecs(job);
  if (getCompressOutput(job) &&
      getOutputCompressionType(job) == CompressionType.RECORD) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression");
  }
}
Example 6
Source File: TestArrayFile.java From big-c with Apache License 2.0 | 5 votes
/**
 * test on {@link ArrayFile.Reader} iteration methods
 * <pre>
 * {@code next(), seek()} in and out of range.
 * </pre>
 */
public void testArrayFileIteration() {
  int SIZE = 10;
  Configuration conf = new Configuration();
  try {
    FileSystem fs = FileSystem.get(conf);
    ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE,
        LongWritable.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull("testArrayFileIteration error !!!", writer);

    for (int i = 0; i < SIZE; i++)
      writer.append(new LongWritable(i));

    writer.close();

    ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
    LongWritable nextWritable = new LongWritable(0);

    for (int i = 0; i < SIZE; i++) {
      nextWritable = (LongWritable) reader.next(nextWritable);
      assertEquals(nextWritable.get(), i);
    }

    assertTrue("testArrayFileIteration seek error !!!",
        reader.seek(new LongWritable(6)));
    nextWritable = (LongWritable) reader.next(nextWritable);
    assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
    assertTrue("testArrayFileIteration error !!!",
        nextWritable.equals(new LongWritable(7)));
    assertFalse("testArrayFileIteration error !!!",
        reader.seek(new LongWritable(SIZE + 5)));
    reader.close();
  } catch (Exception ex) {
    fail("testArrayFileWriterConstruction error !!!");
  }
}
Example 7
Source File: SequenceFileAsBinaryOutputFormat.java From RDFS with Apache License 2.0 | 5 votes
@Override
public void checkOutputSpecs(FileSystem ignored, JobConf job)
    throws IOException {
  super.checkOutputSpecs(ignored, job);
  if (getCompressOutput(job) &&
      getOutputCompressionType(job) == CompressionType.RECORD) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression");
  }
}
Example 8
Source File: SequenceFileAsBinaryOutputFormat.java From hadoop-gpu with Apache License 2.0 | 5 votes
@Override
public void checkOutputSpecs(FileSystem ignored, JobConf job)
    throws IOException {
  super.checkOutputSpecs(ignored, job);
  if (getCompressOutput(job) &&
      getOutputCompressionType(job) == CompressionType.RECORD) {
    throw new InvalidJobConfException("SequenceFileAsBinaryOutputFormat "
        + "doesn't support Record Compression");
  }
}
Example 9
Source File: TestBloomMapFile.java From hadoop with Apache License 2.0 | 4 votes
/**
 * test {@code BloomMapFile.Writer} constructors
 */
@SuppressWarnings("deprecation")
public void testBloomMapFileConstructors() {
  BloomMapFile.Writer writer = null;
  try {
    FileSystem ts = FileSystem.get(conf);
    String testFileName = TEST_FILE.toString();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.BLOCK);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.RECORD,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.RECORD,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        WritableComparator.get(Text.class), Text.class);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
  } catch (Exception ex) {
    fail("testBloomMapFileConstructors error !!!");
  } finally {
    IOUtils.cleanup(null, writer);
  }
}
Example 10
Source File: TestMapFile.java From hadoop with Apache License 2.0 | 4 votes
/**
 * test all available constructor for {@code MapFile.Writer}
 */
@Test
@SuppressWarnings("deprecation")
public void testDeprecatedConstructors() {
  String path = new Path(TEST_DIR, "writes.mapfile").toString();
  MapFile.Writer writer = null;
  MapFile.Reader reader = null;
  try {
    FileSystem fs = FileSystem.getLocal(conf);
    writer = new MapFile.Writer(conf, fs, path,
        IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultCodec,
        defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        SequenceFile.CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();

    reader = new MapFile.Reader(fs, path,
        WritableComparator.get(IntWritable.class), conf);
    assertNotNull(reader);
    assertNotNull("reader key is null !!!", reader.getKeyClass());
    assertNotNull("reader value in null", reader.getValueClass());
  } catch (IOException e) {
    fail(e.getMessage());
  } finally {
    IOUtils.cleanup(null, writer, reader);
  }
}
Example 11
Source File: TestBloomMapFile.java From big-c with Apache License 2.0 | 4 votes
/**
 * test {@code BloomMapFile.Writer} constructors
 */
@SuppressWarnings("deprecation")
public void testBloomMapFileConstructors() {
  BloomMapFile.Writer writer = null;
  try {
    FileSystem ts = FileSystem.get(conf);
    String testFileName = TEST_FILE.toString();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.BLOCK,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.BLOCK);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.RECORD,
        defaultCodec, defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.RECORD,
        defaultProgress);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
    writer = new BloomMapFile.Writer(conf, ts, testFileName,
        WritableComparator.get(Text.class), Text.class);
    assertNotNull("testBloomMapFileConstructors error !!!", writer);
    writer.close();
  } catch (Exception ex) {
    fail("testBloomMapFileConstructors error !!!");
  } finally {
    IOUtils.cleanup(null, writer);
  }
}
Example 12
Source File: TestMapFile.java From big-c with Apache License 2.0 | 4 votes
/**
 * test all available constructor for {@code MapFile.Writer}
 */
@Test
@SuppressWarnings("deprecation")
public void testDeprecatedConstructors() {
  String path = new Path(TEST_DIR, "writes.mapfile").toString();
  MapFile.Writer writer = null;
  MapFile.Reader reader = null;
  try {
    FileSystem fs = FileSystem.getLocal(conf);
    writer = new MapFile.Writer(conf, fs, path,
        IntWritable.class, Text.class, CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
        Text.class, CompressionType.RECORD, defaultCodec,
        defaultProgressable);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        SequenceFile.CompressionType.RECORD);
    assertNotNull(writer);
    writer.close();
    writer = new MapFile.Writer(conf, fs, path,
        WritableComparator.get(Text.class), Text.class,
        CompressionType.RECORD, defaultProgressable);
    assertNotNull(writer);
    writer.close();

    reader = new MapFile.Reader(fs, path,
        WritableComparator.get(IntWritable.class), conf);
    assertNotNull(reader);
    assertNotNull("reader key is null !!!", reader.getKeyClass());
    assertNotNull("reader value in null", reader.getValueClass());
  } catch (IOException e) {
    fail(e.getMessage());
  } finally {
    IOUtils.cleanup(null, writer, reader);
  }
}