Java Code Examples for org.apache.hadoop.io.serializer.Serializer#close()
The following examples show how to use org.apache.hadoop.io.serializer.Serializer#close(). Each example is taken from an open-source project, with a link to the original source file. The pattern is the same throughout: obtain a Serializer from a SerializationFactory, open() it on an output stream, serialize() one or more objects, and then close() it to flush and release the underlying stream.
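Before the project examples, here is a minimal, self-contained sketch of that open/serialize/close lifecycle. It is an illustration, not code from any of the projects below: the SerializerCloseSketch class name is hypothetical, and it uses Hadoop's built-in Text type so that the default WritableSerialization is selected by a plain Configuration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializerCloseSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    SerializationFactory factory = new SerializationFactory(conf);

    // Serialize: open -> serialize -> close.
    Serializer<Text> serializer = factory.getSerializer(Text.class);
    DataOutputBuffer out = new DataOutputBuffer();
    serializer.open(out);
    serializer.serialize(new Text("hello"));
    serializer.close(); // releases the stream passed to open()

    // Deserialize the same bytes, closing the deserializer the same way.
    Deserializer<Text> deserializer = factory.getDeserializer(Text.class);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    deserializer.open(in);
    Text copy = deserializer.deserialize(null);
    deserializer.close();

    System.out.println(copy); // prints "hello"
  }
}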
Example 1
Source File: TestWritableJobConf.java From hadoop with Apache License 2.0. The big-c, RDFS, and hadoop-gpu forks contain byte-for-byte identical code.
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
      factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
      factory.getDeserializer(GenericsUtil.getClass(conf));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();

  return after;
}
Example 2
Source File: Chain.java From hadoop with Apache License 2.0. The big-c, RDFS, and hadoop-gpu forks contain byte-for-byte identical code.
private <E> E makeCopyForPassByValue(Serialization<E> serialization, E obj)
    throws IOException {
  Serializer<E> ser =
      serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
      serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();
  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();

  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
                                    getChainJobConf());
  ByteArrayInputStream bais =
      new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
Example 3
Source File: OutputCollectorImpl.java From attic-apex-malhar with Apache License 2.0
private <T> T cloneObj(T t) throws IOException {
  Serializer<T> keySerializer;
  Class<T> keyClass;
  PipedInputStream pis = new PipedInputStream();
  PipedOutputStream pos = new PipedOutputStream(pis);
  keyClass = (Class<T>) t.getClass();

  keySerializer = serializationFactory.getSerializer(keyClass);
  keySerializer.open(pos);
  keySerializer.serialize(t);

  Deserializer<T> keyDeserializer = serializationFactory.getDeserializer(keyClass);
  keyDeserializer.open(pis);
  T clonedArg0 = keyDeserializer.deserialize(null);

  pos.close();
  pis.close();
  keySerializer.close();
  keyDeserializer.close();
  return clonedArg0;
}
Example 4
Source File: MapOperatorTest.java From attic-apex-malhar with Apache License 2.0
public void testNodeProcessingSchema(
    MapOperator<LongWritable, Text, Text, IntWritable> oper) throws IOException {
  CollectorTestSink sortSink = new CollectorTestSink();
  oper.output.setSink(sortSink);

  oper.setMapClass(WordCount.Map.class);
  oper.setCombineClass(WordCount.Reduce.class);
  oper.setDirName(testMeta.testDir);
  oper.setConfigFile(null);
  oper.setInputFormatClass(TextInputFormat.class);

  Configuration conf = new Configuration();
  JobConf jobConf = new JobConf(conf);
  FileInputFormat.setInputPaths(jobConf, new Path(testMeta.testDir));
  TextInputFormat inputFormat = new TextInputFormat();
  inputFormat.configure(jobConf);
  InputSplit[] splits = inputFormat.getSplits(jobConf, 1);

  SerializationFactory serializationFactory = new SerializationFactory(conf);
  Serializer keySerializer = serializationFactory.getSerializer(splits[0].getClass());
  keySerializer.open(oper.getOutstream());
  keySerializer.serialize(splits[0]);
  oper.setInputSplitClass(splits[0].getClass());
  keySerializer.close();

  oper.setup(null);
  oper.beginWindow(0);
  oper.emitTuples();
  oper.emitTuples();
  oper.endWindow();
  oper.beginWindow(1);
  oper.emitTuples();
  oper.endWindow();

  Assert.assertEquals("number emitted tuples", 3, sortSink.collectedTuples.size());
  for (Object o : sortSink.collectedTuples) {
    LOG.debug(o.toString());
  }
  LOG.debug("Done testing round\n");
  oper.teardown();
}
Example 5
Source File: JobClient.java From RDFS with Apache License 2.0
@SuppressWarnings("unchecked") private <T extends org.apache.hadoop.mapreduce.InputSplit> List<RawSplit> computeNewSplits(JobContext job) throws IOException, InterruptedException, ClassNotFoundException { JobConf conf = job.getJobConf(); org.apache.hadoop.mapreduce.InputFormat<?,?> input = ReflectionUtils.newInstance(job.getInputFormatClass(), job.getJobConf()); List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(job); T[] array = (T[]) splits.toArray(new org.apache.hadoop.mapreduce.InputSplit[splits.size()]); // sort the splits into order based on size, so that the biggest // go first Arrays.sort(array, new NewSplitComparator()); List<RawSplit> rawSplits = new ArrayList<RawSplit>(); if (array.length != 0) { DataOutputBuffer buffer = new DataOutputBuffer(); SerializationFactory factory = new SerializationFactory(conf); Serializer<T> serializer = factory.getSerializer((Class<T>) array[0].getClass()); serializer.open(buffer); for(T split: array) { RawSplit rawSplit = new RawSplit(); rawSplit.setClassName(split.getClass().getName()); buffer.reset(); serializer.serialize(split); rawSplit.setDataLength(split.getLength()); rawSplit.setBytes(buffer.getData(), 0, buffer.getLength()); rawSplit.setLocations(split.getLocations()); rawSplits.add(rawSplit); } serializer.close(); } return rawSplits; }
Example 6
Source File: JobClient.java From hadoop-gpu with Apache License 2.0
@SuppressWarnings("unchecked") private <T extends org.apache.hadoop.mapreduce.InputSplit> int writeNewSplits(JobContext job, Path submitSplitFile ) throws IOException, InterruptedException, ClassNotFoundException { JobConf conf = job.getJobConf(); org.apache.hadoop.mapreduce.InputFormat<?,?> input = ReflectionUtils.newInstance(job.getInputFormatClass(), job.getJobConf()); List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(job); T[] array = (T[]) splits.toArray(new org.apache.hadoop.mapreduce.InputSplit[splits.size()]); // sort the splits into order based on size, so that the biggest // go first Arrays.sort(array, new NewSplitComparator()); DataOutputStream out = writeSplitsFileHeader(conf, submitSplitFile, array.length); try { if (array.length != 0) { DataOutputBuffer buffer = new DataOutputBuffer(); RawSplit rawSplit = new RawSplit(); SerializationFactory factory = new SerializationFactory(conf); Serializer<T> serializer = factory.getSerializer((Class<T>) array[0].getClass()); serializer.open(buffer); for(T split: array) { rawSplit.setClassName(split.getClass().getName()); buffer.reset(); serializer.serialize(split); rawSplit.setDataLength(split.getLength()); rawSplit.setBytes(buffer.getData(), 0, buffer.getLength()); rawSplit.setLocations(split.getLocations()); rawSplit.write(out); } serializer.close(); } } finally { out.close(); } return array.length; }