Java Code Examples for org.apache.avro.io.BinaryEncoder#flush()
The following examples show how to use org.apache.avro.io.BinaryEncoder#flush().
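Most of the examples below follow the same pattern: build a DatumWriter for the record's schema, obtain a BinaryEncoder from an EncoderFactory, write the datum, and call flush() so that any bytes buffered by the encoder actually reach the underlying stream. The following is a minimal, self-contained sketch of that pattern; the Example record schema and the class name are made up purely for illustration.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public class BinaryEncoderFlushSketch {

    // Hypothetical schema, present only to make the sketch runnable.
    private static final Schema SCHEMA = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"Example\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}");

    public static byte[] toBytes(GenericRecord record) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // binaryEncoder() returns a buffering encoder; directBinaryEncoder() writes through immediately.
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(record.getSchema()).write(record, encoder);
        encoder.flush(); // without flush(), a buffering encoder may leave the payload empty or truncated
        return out.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        GenericRecord record = new GenericData.Record(SCHEMA);
        record.put("id", 42L);
        System.out.println(toBytes(record).length + " bytes");
    }
}

For the default buffering encoder returned by binaryEncoder(), flush() is required before reading the output; for directBinaryEncoder(), bytes are written through as they are encoded and flush() mainly flushes the underlying stream.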
Example 1
Source File: FastGenericSerializerGeneratorTest.java From avro-fastserde with Apache License 2.0 | 6 votes |
private <T> Decoder serializeGenericFast(T data, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);

    try {
        FastGenericSerializerGenerator<T> fastGenericSerializerGenerator = new FastGenericSerializerGenerator<>(
                schema, tempDir, classLoader, null);
        FastSerializer<T> fastSerializer = fastGenericSerializerGenerator.generateSerializer();
        fastSerializer.serialize(data, binaryEncoder);
        binaryEncoder.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}
Example 2
Source File: KafkaAvroPublisher.java From doctorkafka with Apache License 2.0 | 6 votes |
public void publish(BrokerStats brokerStats) throws IOException {
    try {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);

        avroEventWriter.write(brokerStats, binaryEncoder);
        binaryEncoder.flush();
        IOUtils.closeQuietly(stream);

        String key = brokerStats.getName() + "_" + System.currentTimeMillis();
        int numPartitions = kafkaProducer.partitionsFor(destTopic).size();
        int partition = brokerStats.getId() % numPartitions;
        Future<RecordMetadata> future = kafkaProducer.send(
            new ProducerRecord<>(destTopic, partition, key.getBytes(), stream.toByteArray()));
        future.get();

        OpenTsdbMetricConverter.incr("kafka.stats.collector.success", 1, "host=" + HOSTNAME);
    } catch (Exception e) {
        LOG.error("Failure in publish stats", e);
        OpenTsdbMetricConverter.incr("kafka.stats.collector.failure", 1, "host=" + HOSTNAME);
        throw new RuntimeException("Avro serialization failure", e);
    }
}
Example 3
Source File: AvroGenericRecordAccessorTest.java From incubator-gobblin with Apache License 2.0 | 6 votes |
@AfterMethod
public void serializeRecord(ITestResult result) throws IOException {
    if (result.isSuccess() && result.getThrowable() == null) {
        /* Serialize the GenericRecord; this can catch issues in set() that the underlying GenericRecord
         * may not catch until serialize time */
        DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(recordSchema);
        ByteArrayOutputStream bOs = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bOs, null);
        datumWriter.write(record, encoder);
        encoder.flush();
        bOs.flush();
        Assert.assertTrue(bOs.toByteArray().length > 0);
    }
}
Example 4
Source File: AvroToBytesConverter.java From incubator-gobblin with Apache License 2.0 | 6 votes |
@Override
public Iterable<byte[]> convertRecord(String outputSchema, GenericRecord inputRecord, WorkUnitState workUnit)
        throws DataConversionException {
    try {
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bytesOut, encoderCache.get());
        encoderCache.set(encoder);
        writer.write(inputRecord, encoder);
        encoder.flush();
        return Collections.singleton(bytesOut.toByteArray());
    } catch (IOException e) {
        throw new DataConversionException("Error serializing record", e);
    }
}
Example 5
Source File: LiAvroSerializerBase.java From incubator-gobblin with Apache License 2.0 | 6 votes |
public byte[] serialize(String topic, GenericRecord data) throws SerializationException {
    Schema schema = data.getSchema();
    MD5Digest schemaId = null;
    try {
        schemaId = schemaRegistry.register(topic, schema);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // MAGIC_BYTE | schemaId-bytes | avro_payload
        out.write(LiAvroSerDeHelper.MAGIC_BYTE);
        out.write(schemaId.asBytes());

        BinaryEncoder encoder = encoderFactory.directBinaryEncoder(out, null);
        DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
        writer.write(data, encoder);
        encoder.flush();

        byte[] bytes = out.toByteArray();
        out.close();
        return bytes;
    } catch (IOException | SchemaRegistryException e) {
        throw new SerializationException(e);
    }
}
Example 6
Source File: Person.java From components with Apache License 2.0 | 5 votes |
public byte[] serToAvroBytes() throws IOException {
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    datumWriter.write(toAvroRecord(), encoder);
    encoder.flush();
    byte[] result = out.toByteArray();
    out.close();
    return result;
}
Example 7
Source File: IcebergEncoder.java From iceberg with Apache License 2.0 | 5 votes |
@Override
public void encode(D datum, OutputStream stream) throws IOException {
    BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(stream, ENCODER.get());
    ENCODER.set(encoder);
    writer.write(datum, encoder);
    encoder.flush();
}
Example 8
Source File: NamespaceValidationTest.java From avro-util with BSD 2-Clause "Simplified" License | 5 votes |
@Test
public void testModernAvroValidatesNamespaces() throws Exception {
    AvroVersion runtimeVersion = AvroCompatibilityHelper.getRuntimeAvroVersion();
    if (!runtimeVersion.laterThan(AvroVersion.AVRO_1_4)) {
        throw new SkipException("only supported under modern avro. runtime version detected as " + runtimeVersion);
    }
    String withAvsc = TestUtil.load("HasNamespace.avsc");
    Schema with = Schema.parse(withAvsc);
    String withoutAvsc = TestUtil.load("HasNoNamespace.avsc");
    Schema without = Schema.parse(withoutAvsc);

    GenericData.Record record = new GenericData.Record(without);
    record.put("f", AvroCompatibilityHelper.newEnumSymbol(without.getField("f").schema(), "B"));
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    GenericDatumWriter writer = new GenericDatumWriter(without);
    BinaryEncoder encoder = AvroCompatibilityHelper.newBinaryEncoder(os);
    //noinspection unchecked
    writer.write(record, encoder);
    encoder.flush();
    byte[] bytes = os.toByteArray();

    GenericDatumReader<GenericData.Record> reader = new GenericDatumReader<>(without, with);
    BinaryDecoder decoder = DecoderFactory.defaultFactory().createBinaryDecoder(bytes, null);
    try {
        GenericData.Record read = reader.read(null, decoder);
        Assert.fail("deserialization was expected to fail");
    } catch (Exception expected) {
        Assert.assertTrue(expected.getMessage().contains("Found EnumType, expecting com.acme.EnumType"));
    }
}
Example 9
Source File: TestAzureBlobAvroWriter.java From samza with Apache License 2.0 | 5 votes |
private byte[] encodeRecord(IndexedRecord record) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Schema schema = record.getSchema();
    EncoderFactory encoderfactory = new EncoderFactory();
    BinaryEncoder encoder = encoderfactory.binaryEncoder(out, null);
    DatumWriter<IndexedRecord> writer;
    if (record instanceof SpecificRecord) {
        writer = new SpecificDatumWriter<>(schema);
    } else {
        writer = new GenericDatumWriter<>(schema);
    }
    writer.write(record, encoder);
    encoder.flush(); // encoder may buffer
    return out.toByteArray();
}
Example 10
Source File: TestAvroEventSerializer.java From mt-flume with Apache License 2.0 | 5 votes |
private byte[] serializeAvro(Object datum, Schema schema) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(schema);
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    out.reset();
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
}
Example 11
Source File: AvroCompatibilityHelperGeneratedFixedClassesTest.java From avro-util with BSD 2-Clause "Simplified" License | 5 votes |
private void roundtrip(Object thingie) throws Exception {
    Schema schema = SpecificData.get().getSchema(thingie.getClass());
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = AvroCompatibilityHelper.newBinaryEncoder(os, false, null);
    SpecificDatumWriter<Object> writer = new SpecificDatumWriter<>(schema);
    writer.write(thingie, binaryEncoder);
    binaryEncoder.flush();
    byte[] serialized = os.toByteArray();

    ByteArrayInputStream is = new ByteArrayInputStream(serialized);
    BinaryDecoder binaryDecoder = AvroCompatibilityHelper.newBinaryDecoder(is, false, null);
    SpecificDatumReader<Object> reader = new SpecificDatumReader<>(schema);
    Object deserialize = reader.read(null, binaryDecoder);
    Assert.assertEquals(deserialize, thingie);
}
Example 12
Source File: AvroUtils.java From localization_nifi with Apache License 2.0 | 5 votes |
/**
 * Writes provided {@link GenericRecord} into the provided
 * {@link OutputStream}.
 */
public static void write(GenericRecord record, OutputStream out) {
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema());
    try {
        writer.write(record, encoder);
        encoder.flush();
    } catch (Exception e) {
        throw new IllegalStateException("Failed to write AVRO record", e);
    }
}
Example 13
Source File: PubSubOutputRuntime.java From components with Apache License 2.0 | 5 votes |
@Override
public PubsubMessage apply(IndexedRecord input) {
    try {
        DatumWriter<IndexedRecord> datumWriter = new GenericDatumWriter(input.getSchema());
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        datumWriter.write(input, encoder);
        encoder.flush();
        byte[] result = out.toByteArray();
        out.close();
        return new PubsubMessage(result, ImmutableMap.<String, String> of());
    } catch (IOException e) {
        throw TalendRuntimeException.createUnexpectedException(e);
    }
}
Example 14
Source File: TestConvertAvroToJSON.java From localization_nifi with Apache License 2.0 | 5 votes |
@Test
public void testSingleSchemalessAvroMessage_noContainer() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ConvertAvroToJSON());
    runner.setProperty(ConvertAvroToJSON.CONTAINER_OPTIONS, ConvertAvroToJSON.CONTAINER_NONE);
    Schema schema = new Schema.Parser().parse(new File("src/test/resources/user.avsc"));
    String stringSchema = schema.toString();
    runner.setProperty(ConvertAvroToJSON.SCHEMA, stringSchema);

    final GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("favorite_number", 256);

    final ByteArrayOutputStream out1 = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out1, null);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(user1, encoder);
    encoder.flush();
    out1.flush();
    byte[] test = out1.toByteArray();
    runner.enqueue(test);

    runner.run();

    runner.assertAllFlowFilesTransferred(ConvertAvroToJSON.REL_SUCCESS, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertAvroToJSON.REL_SUCCESS).get(0);
    out.assertContentEquals("{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": null}");
}
Example 15
Source File: KafkaAvroWriter.java From hiped2 with Apache License 2.0 | 5 votes |
public static byte[] toBytes(Stock stock) {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    BinaryEncoder encoder = encoderFactory.directBinaryEncoder(outputStream, null);
    DatumWriter<Stock> userDatumWriter = new SpecificDatumWriter<Stock>(Stock.class);
    try {
        userDatumWriter.write(stock, encoder);
        encoder.flush();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return outputStream.toByteArray();
}
Example 16
Source File: HoodieAvroUtils.java From hudi with Apache License 2.0 | 5 votes |
/**
 * Convert a given avro record to bytes.
 */
public static byte[] avroToBytes(GenericRecord record) throws IOException {
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema());
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, reuseEncoder.get());
    reuseEncoder.set(encoder);
    writer.write(record, encoder);
    encoder.flush();
    out.close();
    return out.toByteArray();
}
Example 17
Source File: AvroConsoleProducer.java From HiveKa with Apache License 2.0 | 5 votes |
public static byte[] serializeAvro(Schema schema, GenericRecord event) throws IOException {
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(stream, null);
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    datumWriter.write(event, binaryEncoder);
    binaryEncoder.flush();
    IOUtils.closeQuietly(stream);

    return stream.toByteArray();
}
Example 18
Source File: FastSerdeTestsSupport.java From avro-fastserde with Apache License 2.0 | 5 votes |
public static <T> Decoder serializeGeneric(T data, Schema schema) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = EncoderFactory.get().directBinaryEncoder(baos, null);

    try {
        GenericDatumWriter<T> writer = new GenericDatumWriter<>(schema);
        writer.write(data, binaryEncoder);
        binaryEncoder.flush();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
}
Example 19
Source File: TruckParkAppIntegrationTest.java From data-highway with Apache License 2.0 | 5 votes |
@Override
public byte[] serialize(String topic, Record record) {
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
        baos.write(DataDeserializer.MAGIC_BYTE);
        baos.write(Ints.toByteArray(VERSION));
        DatumWriter<Object> writer = new GenericDatumWriter<>(SCHEMA);
        BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(baos, null);
        writer.write(record, encoder);
        encoder.flush();
        return baos.toByteArray();
    } catch (IOException unreachable) {
        throw new RuntimeException(unreachable);
    }
}