org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator Java Examples
The following examples show how to use
org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator.
You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: TestSequenceFileMergeProgress.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile with the
 * given compression type, then merges that file with {@link SequenceFile.Sorter} and
 * verifies that every record is returned and that merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode to exercise
 * @throws IOException if any local filesystem operation fails
 */
public void runTest(CompressionType compressionType) throws IOException {
  JobConf job = new JobConf();
  FileSystem fs = FileSystem.getLocal(job);
  Path dir = new Path(System.getProperty("test.build.data", ".") + "/mapred");
  Path file = new Path(dir, "test.seq");
  Path tempDir = new Path(dir, "tmp");
  fs.delete(dir, true);
  FileInputFormat.setInputPaths(job, dir);
  fs.mkdirs(tempDir);

  LongWritable tkey = new LongWritable();
  Text tval = new Text();
  SequenceFile.Writer writer =
      SequenceFile.createWriter(fs, job, file, LongWritable.class, Text.class,
                                compressionType, new DefaultCodec());
  try {
    // Identical, highly repetitive records compress very well, so the compressed
    // byte offsets diverge sharply from record counts — which is exactly what
    // makes the progress reporting below worth testing.
    for (int i = 0; i < RECORDS; ++i) {
      tkey.set(1234);
      tval.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(tkey, tval);
    }
  } finally {
    writer.close();
  }

  long fileLength = fs.getFileStatus(file).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
      job.getOutputKeyComparator(), job.getMapOutputKeyClass(),
      job.getMapOutputValueClass(), job);
  Path[] paths = new Path[] {file};
  RawKeyValueIterator rIter = sorter.merge(paths, tempDir, false);
  int count = 0;
  while (rIter.next()) {
    count++;
  }
  assertEquals(RECORDS, count);
  // Use the three-argument overload: assertEquals(float, float) is deprecated
  // because floating-point equality without a tolerance is ambiguous/fragile.
  assertEquals(1.0f, rIter.getProgress().get(), 0.0f);
}
Example #2
Source File: MergeSorter.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Sorts the buffered records in place using merge sort, as required by
 * {@code BasicTypeSorterBase}.
 *
 * @return an iterator over the sorted raw key/value pairs, or {@code null}
 *         when no records have been buffered
 */
public RawKeyValueIterator sort() {
  MergeSort merger = new MergeSort(this);
  int recordCount = super.count;
  if (recordCount == 0) {
    return null;
  }
  int[] recordPointers = super.pointers;
  // Merge sort needs an auxiliary array; seed it with a copy of the pointers.
  int[] scratch = new int[recordCount];
  System.arraycopy(recordPointers, 0, scratch, 0, recordCount);
  merger.mergeSort(recordPointers, scratch, 0, recordCount);
  return new MRSortResultIterator(super.keyValBuffer, scratch,
      super.startOffsets, super.keyLengths, super.valueLengths);
}
Example #3
Source File: TestSequenceFileMergeProgress.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile with the
 * given compression type, then merges that file with {@link SequenceFile.Sorter} and
 * verifies that every record is returned and that merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode to exercise
 * @throws IOException if any local filesystem operation fails
 */
public void runTest(CompressionType compressionType) throws IOException {
  JobConf job = new JobConf();
  FileSystem fs = FileSystem.getLocal(job);
  Path dir = new Path(System.getProperty("test.build.data", ".") + "/mapred");
  Path file = new Path(dir, "test.seq");
  Path tempDir = new Path(dir, "tmp");
  fs.delete(dir, true);
  FileInputFormat.setInputPaths(job, dir);
  fs.mkdirs(tempDir);

  LongWritable tkey = new LongWritable();
  Text tval = new Text();
  SequenceFile.Writer writer =
      SequenceFile.createWriter(fs, job, file, LongWritable.class, Text.class,
                                compressionType, new DefaultCodec());
  try {
    // Identical, highly repetitive records compress very well, so the compressed
    // byte offsets diverge sharply from record counts — which is exactly what
    // makes the progress reporting below worth testing.
    for (int i = 0; i < RECORDS; ++i) {
      tkey.set(1234);
      tval.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(tkey, tval);
    }
  } finally {
    writer.close();
  }

  long fileLength = fs.getFileStatus(file).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
      job.getOutputKeyComparator(), job.getMapOutputKeyClass(),
      job.getMapOutputValueClass(), job);
  Path[] paths = new Path[] {file};
  RawKeyValueIterator rIter = sorter.merge(paths, tempDir, false);
  int count = 0;
  while (rIter.next()) {
    count++;
  }
  assertEquals(RECORDS, count);
  // Use the three-argument overload: assertEquals(float, float) is deprecated
  // because floating-point equality without a tolerance is ambiguous/fragile.
  assertEquals(1.0f, rIter.getProgress().get(), 0.0f);
}
Example #4
Source File: MergeSorter.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Sorts the buffered records in place using merge sort, as required by
 * {@code BasicTypeSorterBase}.
 *
 * @return an iterator over the sorted raw key/value pairs, or {@code null}
 *         when no records have been buffered
 */
public RawKeyValueIterator sort() {
  MergeSort merger = new MergeSort(this);
  int recordCount = super.count;
  if (recordCount == 0) {
    return null;
  }
  int[] recordPointers = super.pointers;
  // Merge sort needs an auxiliary array; seed it with a copy of the pointers.
  int[] scratch = new int[recordCount];
  System.arraycopy(recordPointers, 0, scratch, 0, recordCount);
  merger.mergeSort(recordPointers, scratch, 0, recordCount);
  return new MRSortResultIterator(super.keyValBuffer, scratch,
      super.startOffsets, super.keyLengths, super.valueLengths);
}
Example #5
Source File: MergeSorter.java From RDFS with Apache License 2.0 | 4 votes |
/**
 * Sorts the buffered records in place using merge sort, as required by
 * {@code BasicTypeSorterBase}.
 *
 * @return an iterator over the sorted raw key/value pairs, or {@code null}
 *         when no records have been buffered
 */
public RawKeyValueIterator sort() {
  MergeSort merger = new MergeSort(this);
  int recordCount = super.count;
  if (recordCount == 0) {
    return null;
  }
  int[] recordPointers = super.pointers;
  // Merge sort needs an auxiliary array; seed it with a copy of the pointers.
  int[] scratch = new int[recordCount];
  System.arraycopy(recordPointers, 0, scratch, 0, recordCount);
  merger.mergeSort(recordPointers, scratch, 0, recordCount);
  return new MRSortResultIterator(super.keyValBuffer, scratch,
      super.startOffsets, super.keyLengths, super.valueLengths);
}
Example #6
Source File: TestSequenceFileMergeProgress.java From RDFS with Apache License 2.0 | 4 votes |
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile with the
 * given compression type, then merges that file with {@link SequenceFile.Sorter} and
 * verifies that every record is returned and that merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode to exercise
 * @throws IOException if any local filesystem operation fails
 */
public void runTest(CompressionType compressionType) throws IOException {
  JobConf job = new JobConf();
  FileSystem fs = FileSystem.getLocal(job);
  Path dir = new Path(System.getProperty("test.build.data", ".") + "/mapred");
  Path file = new Path(dir, "test.seq");
  Path tempDir = new Path(dir, "tmp");
  fs.delete(dir, true);
  FileInputFormat.setInputPaths(job, dir);
  fs.mkdirs(tempDir);

  LongWritable tkey = new LongWritable();
  Text tval = new Text();
  SequenceFile.Writer writer =
      SequenceFile.createWriter(fs, job, file, LongWritable.class, Text.class,
                                compressionType, new DefaultCodec());
  try {
    // Identical, highly repetitive records compress very well, so the compressed
    // byte offsets diverge sharply from record counts — which is exactly what
    // makes the progress reporting below worth testing.
    for (int i = 0; i < RECORDS; ++i) {
      tkey.set(1234);
      tval.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(tkey, tval);
    }
  } finally {
    writer.close();
  }

  long fileLength = fs.getFileStatus(file).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
      job.getOutputKeyComparator(), job.getMapOutputKeyClass(),
      job.getMapOutputValueClass(), job);
  Path[] paths = new Path[] {file};
  RawKeyValueIterator rIter = sorter.merge(paths, tempDir, false);
  int count = 0;
  while (rIter.next()) {
    count++;
  }
  assertEquals(RECORDS, count);
  // Use the three-argument overload: assertEquals(float, float) is deprecated
  // because floating-point equality without a tolerance is ambiguous/fragile.
  assertEquals(1.0f, rIter.getProgress().get(), 0.0f);
}
Example #7
Source File: MergeSorter.java From hadoop-gpu with Apache License 2.0 | 4 votes |
/**
 * Sorts the buffered records in place using merge sort, as required by
 * {@code BasicTypeSorterBase}.
 *
 * @return an iterator over the sorted raw key/value pairs, or {@code null}
 *         when no records have been buffered
 */
public RawKeyValueIterator sort() {
  MergeSort merger = new MergeSort(this);
  int recordCount = super.count;
  if (recordCount == 0) {
    return null;
  }
  int[] recordPointers = super.pointers;
  // Merge sort needs an auxiliary array; seed it with a copy of the pointers.
  int[] scratch = new int[recordCount];
  System.arraycopy(recordPointers, 0, scratch, 0, recordCount);
  merger.mergeSort(recordPointers, scratch, 0, recordCount);
  return new MRSortResultIterator(super.keyValBuffer, scratch,
      super.startOffsets, super.keyLengths, super.valueLengths);
}
Example #8
Source File: TestSequenceFileMergeProgress.java From hadoop-gpu with Apache License 2.0 | 4 votes |
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile with the
 * given compression type, then merges that file with {@link SequenceFile.Sorter} and
 * verifies that every record is returned and that merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode to exercise
 * @throws IOException if any local filesystem operation fails
 */
public void runTest(CompressionType compressionType) throws IOException {
  JobConf job = new JobConf();
  FileSystem fs = FileSystem.getLocal(job);
  Path dir = new Path(System.getProperty("test.build.data", ".") + "/mapred");
  Path file = new Path(dir, "test.seq");
  Path tempDir = new Path(dir, "tmp");
  fs.delete(dir, true);
  FileInputFormat.setInputPaths(job, dir);
  fs.mkdirs(tempDir);

  LongWritable tkey = new LongWritable();
  Text tval = new Text();
  SequenceFile.Writer writer =
      SequenceFile.createWriter(fs, job, file, LongWritable.class, Text.class,
                                compressionType, new DefaultCodec());
  try {
    // Identical, highly repetitive records compress very well, so the compressed
    // byte offsets diverge sharply from record counts — which is exactly what
    // makes the progress reporting below worth testing.
    for (int i = 0; i < RECORDS; ++i) {
      tkey.set(1234);
      tval.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(tkey, tval);
    }
  } finally {
    writer.close();
  }

  long fileLength = fs.getFileStatus(file).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
      job.getOutputKeyComparator(), job.getMapOutputKeyClass(),
      job.getMapOutputValueClass(), job);
  Path[] paths = new Path[] {file};
  RawKeyValueIterator rIter = sorter.merge(paths, tempDir, false);
  int count = 0;
  while (rIter.next()) {
    count++;
  }
  assertEquals(RECORDS, count);
  // Use the three-argument overload: assertEquals(float, float) is deprecated
  // because floating-point equality without a tolerance is ambiguous/fragile.
  assertEquals(1.0f, rIter.getProgress().get(), 0.0f);
}
Example #9
Source File: BufferSorter.java From hadoop with Apache License 2.0 | 2 votes |
/**
 * Sorts the buffered key/value records. The framework — not the caller —
 * decides when this is actually invoked.
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public RawKeyValueIterator sort();
Example #10
Source File: BufferSorter.java From big-c with Apache License 2.0 | 2 votes |
/**
 * Sorts the buffered key/value records. The framework — not the caller —
 * decides when this is actually invoked.
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public RawKeyValueIterator sort();
Example #11
Source File: BufferSorter.java From RDFS with Apache License 2.0 | 2 votes |
/**
 * Sorts the buffered key/value records. The framework — not the caller —
 * decides when this is actually invoked.
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public RawKeyValueIterator sort();
Example #12
Source File: BufferSorter.java From hadoop-gpu with Apache License 2.0 | 2 votes |
/**
 * Sorts the buffered key/value records. The framework — not the caller —
 * decides when this is actually invoked.
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public RawKeyValueIterator sort();
Example #13
Source File: BasicTypeSorterBase.java From hadoop with Apache License 2.0 | votes |
/**
 * Sorts the buffered key/value records; implemented by concrete sorter
 * subclasses (e.g. a merge-sort-based sorter).
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public abstract RawKeyValueIterator sort();
Example #14
Source File: BasicTypeSorterBase.java From big-c with Apache License 2.0 | votes |
/**
 * Sorts the buffered key/value records; implemented by concrete sorter
 * subclasses (e.g. a merge-sort-based sorter).
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public abstract RawKeyValueIterator sort();
Example #15
Source File: BasicTypeSorterBase.java From RDFS with Apache License 2.0 | votes |
/**
 * Sorts the buffered key/value records; implemented by concrete sorter
 * subclasses (e.g. a merge-sort-based sorter).
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public abstract RawKeyValueIterator sort();
Example #16
Source File: BasicTypeSorterBase.java From hadoop-gpu with Apache License 2.0 | votes |
/**
 * Sorts the buffered key/value records; implemented by concrete sorter
 * subclasses (e.g. a merge-sort-based sorter).
 *
 * @return an iterator over the sorted raw key/value pairs
 */
public abstract RawKeyValueIterator sort();