Java Code Examples for org.apache.hadoop.io.DataOutputBuffer#getData()
The following examples show how to use org.apache.hadoop.io.DataOutputBuffer#getData().
Each example notes its original project, source file, and license above the code.
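Before the examples, one point about the API contract is worth spelling out: getData() returns the buffer's internal backing array, which is usually longer than the data actually written, so it must always be paired with getLength() to delimit the valid region. Here is a minimal, self-contained sketch of the usual round-trip idiom (our own illustration, not drawn from any of the projects below; the class name GetDataDemo is invented):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class GetDataDemo {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);

    // getData() exposes the internal backing array; it is typically larger
    // than the serialized data, so always pair it with getLength().
    byte[] backing = out.getData();
    int valid = out.getLength();

    DataInputBuffer in = new DataInputBuffer();
    in.reset(backing, 0, valid);

    Text roundTrip = new Text();
    roundTrip.readFields(in);
    System.out.println(roundTrip); // prints "hello"
  }
}

Several of the examples below rely on exactly this property: Example 2 copies out only the first getLength() bytes, and Example 5 records getLength() offsets while writing.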
Example 1
Source File: Chain.java From hadoop with Apache License 2.0
private <E> E makeCopyForPassByValue(Serialization<E> serialization,
    E obj) throws IOException {
  Serializer<E> ser =
      serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
      serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();

  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
      getChainJobConf());

  ByteArrayInputStream bais =
      new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();

  return obj;
}
Example 2
Source File: HFilesystemAdmin.java From spliceengine with GNU Affero General Public License v3.0
private static byte[] toByteArray(Writable... writables) {
  final DataOutputBuffer out = new DataOutputBuffer();
  try {
    for (Writable w : writables) {
      w.write(out);
    }
    out.close();
  } catch (IOException e) {
    throw new RuntimeException("Fail to convert writables to a byte array", e);
  }
  byte[] bytes = out.getData();
  if (bytes.length == out.getLength()) {
    return bytes;
  }
  byte[] result = new byte[out.getLength()];
  System.arraycopy(bytes, 0, result, 0, out.getLength());
  return result;
}
Example 3
Source File: QueryWritableTest.java From incubator-retired-blur with Apache License 2.0
@Test
public void testTermQuery() throws IOException {
  TermQuery query = new TermQuery(new Term("field", "value"));
  QueryWritable queryWritable = new QueryWritable();
  queryWritable.setQuery(query);
  DataOutputBuffer out = new DataOutputBuffer();
  queryWritable.write(out);
  byte[] data = out.getData();
  int length = out.getLength();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, length);
  QueryWritable newQueryWritable = new QueryWritable();
  newQueryWritable.readFields(in);

  Query termQuery = newQueryWritable.getQuery();

  assertEquals(query, termQuery);
}
Example 4
Source File: LaunchContainerRunnable.java From attic-apex-core with Apache License 2.0
public static ByteBuffer getTokens(UserGroupInformation ugi,
    Token<StramDelegationTokenIdentifier> delegationToken) {
  try {
    Collection<Token<? extends TokenIdentifier>> tokens =
        ugi.getCredentials().getAllTokens();
    Credentials credentials = new Credentials();
    for (Token<? extends TokenIdentifier> token : tokens) {
      if (!token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
        credentials.addToken(token.getService(), token);
        LOG.debug("Passing container token {}", token);
      }
    }
    credentials.addToken(delegationToken.getService(), delegationToken);
    DataOutputBuffer dataOutput = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dataOutput);
    byte[] tokenBytes = dataOutput.getData();
    ByteBuffer cTokenBuf = ByteBuffer.wrap(tokenBytes);
    return cTokenBuf.duplicate();
  } catch (IOException e) {
    throw new RuntimeException("Error generating delegation token", e);
  }
}
Example 5
Source File: TestIndexedSort.java From big-c with Apache License 2.0
public WritableSortable(int j) throws IOException {
  seed = r.nextLong();
  r.setSeed(seed);
  Text t = new Text();
  StringBuilder sb = new StringBuilder();
  indices = new int[j];
  offsets = new int[j];
  check = new String[j];
  DataOutputBuffer dob = new DataOutputBuffer();
  for (int i = 0; i < j; ++i) {
    indices[i] = i;
    offsets[i] = dob.getLength();
    genRandom(t, r.nextInt(15) + 1, sb);
    t.write(dob);
    check[i] = t.toString();
  }
  eob = dob.getLength();
  bytes = dob.getData();
  comparator = WritableComparator.get(Text.class);
}
Example 6
Source File: CryptoStreamsTestBase.java From big-c with Apache License 2.0
@Before
public void setUp() throws IOException {
  // Generate data
  final int seed = new Random().nextInt();
  final DataOutputBuffer dataBuf = new DataOutputBuffer();
  final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for (int i = 0; i < count; ++i) {
    generator.next();
    final RandomDatum key = generator.getKey();
    final RandomDatum value = generator.getValue();

    key.write(dataBuf);
    value.write(dataBuf);
  }
  LOG.info("Generated " + count + " records");
  data = dataBuf.getData();
  dataLen = dataBuf.getLength();
}
Example 7
Source File: Chain.java From hadoop-gpu with Apache License 2.0
The makeCopyForPassByValue method is identical to the one shown in Example 1.
Example 8
Source File: Chain.java From big-c with Apache License 2.0
The makeCopyForPassByValue method is identical to the one shown in Example 1.
Example 9
Source File: TestClientKeyValueLocal.java From phoenix with BSD 3-Clause "New" or "Revised" License
private void validate(KeyValue kv, byte[] row, byte[] family, byte[] qualifier,
    long ts, Type type, byte[] value) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  kv.write(out);
  out.close();
  byte[] data = out.getData();

  // read it back in
  KeyValue read = new KeyValue();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, data.length);
  read.readFields(in);
  in.close();

  // validate that its the same
  assertTrue("Row didn't match!", Bytes.equals(row, read.getRow()));
  assertTrue("Family didn't match!", Bytes.equals(family, read.getFamily()));
  assertTrue("Qualifier didn't match!", Bytes.equals(qualifier, read.getQualifier()));
  assertTrue("Value didn't match!", Bytes.equals(value, read.getValue()));
  assertEquals("Timestamp didn't match", ts, read.getTimestamp());
  assertEquals("Type didn't match", type.getCode(), read.getType());
}
Example 10
Source File: CryptoStreamsTestBase.java From hadoop with Apache License 2.0
The setUp() method is identical to the one shown in Example 6.
Example 11
Source File: TestDFSPacket.java From hadoop with Apache License 2.0
@Test
public void testPacket() throws Exception {
  Random r = new Random(12345L);
  byte[] data = new byte[chunkSize];
  r.nextBytes(data);
  byte[] checksum = new byte[checksumSize];
  r.nextBytes(checksum);

  DataOutputBuffer os = new DataOutputBuffer(data.length * 2);

  byte[] packetBuf = new byte[data.length * 2];
  DFSPacket p = new DFSPacket(packetBuf, maxChunksPerPacket,
      0, 0, checksumSize, false);
  p.setSyncBlock(true);
  p.writeData(data, 0, data.length);
  p.writeChecksum(checksum, 0, checksum.length);
  p.writeTo(os);

  // we have set syncBlock to true, so the header has the maximum length
  int headerLen = PacketHeader.PKT_MAX_HEADER_LEN;
  byte[] readBuf = os.getData();

  assertArrayRegionsEqual(readBuf, headerLen, checksum, 0, checksum.length);
  assertArrayRegionsEqual(readBuf, headerLen + checksum.length, data, 0, data.length);
}
Example 12
Source File: TestDelegationTokenRemoteFetcher.java From hadoop with Apache License 2.0
@Override
public void handle(Channel channel, Token<DelegationTokenIdentifier> token,
    String serviceUrl) throws IOException {
  Assert.assertEquals(testToken, token);

  Credentials creds = new Credentials();
  creds.addToken(new Text(serviceUrl), token);
  DataOutputBuffer out = new DataOutputBuffer();
  creds.write(out);
  int fileLength = out.getData().length;
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData());
  HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
  response.setHeader(HttpHeaders.Names.CONTENT_LENGTH, String.valueOf(fileLength));
  response.setContent(cbuffer);
  channel.write(response).addListener(ChannelFutureListener.CLOSE);
}
Example 13
Source File: LaunchContainerRunnable.java From Bats with Apache License 2.0
The getTokens() method is identical to the one shown in Example 4.
Example 14
Source File: TestCorruptFileBlocks.java From RDFS with Apache License 2.0
/**
 * Serialize the cfb given, deserialize and return the result.
 */
static CorruptFileBlocks serializeAndDeserialize(CorruptFileBlocks cfb)
    throws IOException {
  DataOutputBuffer buf = new DataOutputBuffer();
  cfb.write(buf);

  byte[] data = buf.getData();
  DataInputStream input = new DataInputStream(new ByteArrayInputStream(data));

  CorruptFileBlocks result = new CorruptFileBlocks();
  result.readFields(input);

  return result;
}
Example 15
Source File: TestEntityDescriptor.java From tez with Apache License 2.0
public void testSingularWrite(InputDescriptor entityDescriptor,
    InputDescriptor deserialized, UserPayload payload, String confVal)
    throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  entityDescriptor.write(out);
  out.close();
  ByteArrayOutputStream bos = new ByteArrayOutputStream(out.getData().length);
  bos.write(out.getData());

  Mockito.verify(entityDescriptor).writeSingular(eq(out), any(ByteBuffer.class));
  deserialized.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
  verifyResults(entityDescriptor, deserialized, payload, confVal);
}
Example 16
Source File: FlinkXMLParser.java From incubator-retired-mrql with Apache License 2.0
public String slice () {
  if (splitter.hasNext()) {
    DataOutputBuffer b = splitter.next();
    return new String(b.getData(), 0, b.getLength());
  } else {
    return null;
  }
}
Example 17
Source File: FlinkJsonParser.java From incubator-retired-mrql with Apache License 2.0
The slice() method is identical to the one shown in Example 16.
Example 18
Source File: JsonFormatParser.java From incubator-retired-mrql with Apache License 2.0
The slice() method is identical to the one shown in Example 16.
Example 19
Source File: XMLParser.java From incubator-retired-mrql with Apache License 2.0
The slice() method is identical to the one shown in Example 16.
Example 20
Source File: TestVCFOutputFormat.java From Hadoop-BAM with MIT License
@Test
public void testVariantContextReadWrite() throws IOException, InterruptedException {
  // This is to check whether issue
  // https://github.com/HadoopGenomics/Hadoop-BAM/issues/1 has been resolved
  VariantContextBuilder vctx_builder = new VariantContextBuilder();

  ArrayList<Allele> alleles = new ArrayList<Allele>();
  alleles.add(Allele.create("C", false));
  alleles.add(Allele.create("G", true));
  vctx_builder.alleles(alleles);

  ArrayList<Genotype> genotypes = new ArrayList<Genotype>();
  GenotypeBuilder builder = new GenotypeBuilder();
  genotypes.add(builder.alleles(alleles.subList(0, 1)).name("NA00001").GQ(48).DP(1).make());
  genotypes.add(builder.alleles(alleles.subList(0, 1)).name("NA00002").GQ(42).DP(2).make());
  genotypes.add(builder.alleles(alleles.subList(0, 1)).name("NA00003").GQ(39).DP(3).make());
  vctx_builder.genotypes(genotypes);

  HashSet<String> filters = new HashSet<String>();
  vctx_builder.filters(filters);

  HashMap<String, Object> attributes = new HashMap<String, Object>();
  attributes.put("NS", new Integer(4));
  vctx_builder.attributes(attributes);

  vctx_builder.loc("20", 2, 2);
  vctx_builder.log10PError(-8.0);

  VariantContext ctx = vctx_builder.make();
  VariantContextWithHeader ctxh = new VariantContextWithHeader(ctx, readHeader());
  writable.set(ctxh);

  DataOutputBuffer out = new DataOutputBuffer(1000);
  writable.write(out);

  byte[] data = out.getData();
  ByteArrayInputStream bis = new ByteArrayInputStream(data);

  writable = new VariantContextWritable();
  writable.readFields(new DataInputStream(bis));

  VariantContext vc = writable.get();
  Assert.assertArrayEquals("comparing Alleles", ctx.getAlleles().toArray(), vc.getAlleles().toArray());
  Assert.assertEquals("comparing Log10PError", ctx.getLog10PError(), vc.getLog10PError(), 0.01);
  Assert.assertArrayEquals("comparing Filters", ctx.getFilters().toArray(), vc.getFilters().toArray());
  Assert.assertEquals("comparing Attributes", ctx.getAttributes(), vc.getAttributes());

  // Now check the genotypes. Note: we need to make the header accessible
  // before decoding the genotypes.
  GenotypesContext gc = vc.getGenotypes();
  assert (gc instanceof LazyVCFGenotypesContext);
  LazyVCFGenotypesContext.HeaderDataCache headerDataCache =
      new LazyVCFGenotypesContext.HeaderDataCache();
  headerDataCache.setHeader(readHeader());
  ((LazyVCFGenotypesContext) gc).getParser().setHeaderDataCache(headerDataCache);

  for (Genotype genotype : genotypes) {
    Assert.assertEquals("checking genotype name", genotype.getSampleName(),
        gc.get(genotypes.indexOf(genotype)).getSampleName());
    Assert.assertEquals("checking genotype quality", genotype.getGQ(),
        gc.get(genotypes.indexOf(genotype)).getGQ());
    Assert.assertEquals("checking genotype read depth", genotype.getDP(),
        gc.get(genotypes.indexOf(genotype)).getDP());
  }
}