org.apache.avro.specific.SpecificDatumReader Java Examples
The following examples show how to use
org.apache.avro.specific.SpecificDatumReader.
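Before the project examples, here is a minimal sketch of the most common usage pattern: construct a SpecificDatumReader for a generated record class, then iterate an Avro container file. The User class and users.avro file are hypothetical placeholders, not taken from any of the projects below.

import java.io.File;
import java.io.IOException;

import org.apache.avro.file.DataFileReader;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;

public class SpecificReadSketch {
  public static void main(String[] args) throws IOException {
    // User stands in for any Avro-generated SpecificRecord class.
    DatumReader<User> datumReader = new SpecificDatumReader<>(User.class);
    try (DataFileReader<User> fileReader =
             new DataFileReader<>(new File("users.avro"), datumReader)) {
      User record = null;
      while (fileReader.hasNext()) {
        // Passing the previous record back in lets Avro reuse the object.
        record = fileReader.next(record);
        System.out.println(record);
      }
    }
  }
}

Many of the examples below use the other common pattern instead: obtain a Decoder from DecoderFactory (binaryDecoder or jsonDecoder) and call reader.read(reuse, decoder) on raw bytes.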
Example #1
Source File: FastSerdeCache.java From avro-fastserde with Apache License 2.0

/**
 * Generates if needed and returns specific-class aware avro {@link FastDeserializer}.
 *
 * @param writerSchema {@link Schema} of written data
 * @param readerSchema {@link Schema} intended to be used during deserialization
 * @return specific-class aware avro {@link FastDeserializer}
 */
public FastDeserializer<?> getFastSpecificDeserializer(Schema writerSchema, Schema readerSchema) {
  String schemaKey = getSchemaKey(writerSchema, readerSchema);

  FastDeserializer<?> deserializer = fastSpecificRecordDeserializersCache.get(schemaKey);
  if (deserializer == null) {
    // Until the generated deserializer is ready, fall back to a plain SpecificDatumReader.
    SpecificDatumReader<?> fallbackReader = new SpecificDatumReader<>(writerSchema, readerSchema);
    deserializer = fastSpecificRecordDeserializersCache.putIfAbsent(schemaKey,
        d -> fallbackReader.read(null, d));
    if (deserializer == null) {
      deserializer = fastSpecificRecordDeserializersCache.get(schemaKey);
      // Build the fast deserializer asynchronously and swap it into the cache when done.
      CompletableFuture
          .supplyAsync(() -> buildSpecificDeserializer(writerSchema, readerSchema), executor)
          .thenAccept(d -> fastSpecificRecordDeserializersCache.put(schemaKey, d));
    }
  }

  return deserializer;
}
Example #2
Source File: FastSpecificSerializerGeneratorTest.java From avro-util with BSD 2-Clause "Simplified" License

@SuppressWarnings("unchecked")
private <T> T decodeRecordFast(Schema writerSchema, Decoder decoder) {
  // With the single-schema constructor the writer schema doubles as the reader schema.
  SpecificDatumReader<T> datumReader = new SpecificDatumReader<>(writerSchema);
  try {
    return datumReader.read(null, decoder);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Example #3
Source File: KafkaAvroJobMonitor.java From incubator-gobblin with Apache License 2.0

public KafkaAvroJobMonitor(String topic, MutableJobCatalog catalog, Config config,
    Schema schema, SchemaVersionWriter<?> versionWriter) {
  super(topic, catalog, config);
  this.schema = schema;
  // One decoder and one reader per thread, so threads never share mutable decoding state.
  this.decoder = new ThreadLocal<BinaryDecoder>() {
    @Override
    protected BinaryDecoder initialValue() {
      InputStream dummyInputStream = new ByteArrayInputStream(new byte[0]);
      return DecoderFactory.get().binaryDecoder(dummyInputStream, null);
    }
  };
  this.reader = new ThreadLocal<SpecificDatumReader<T>>() {
    @Override
    protected SpecificDatumReader<T> initialValue() {
      return new SpecificDatumReader<>(KafkaAvroJobMonitor.this.schema);
    }
  };
  this.versionWriter = versionWriter;
}
Example #4
Source File: LaserFeatureListenser.java From laser with Apache License 2.0

public synchronized void recieveMessages(Message message) {
  final DatumReader<B5MEvent> reader = new SpecificDatumReader<B5MEvent>(B5MEvent.SCHEMA$);
  final B5MEvent b5mEvent = new B5MEvent();
  byte[] data = message.getData();
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(data, null);
  try {
    // Decode into the pre-allocated record, then fan the event out to all consumers.
    reader.read(b5mEvent, decoder);
    for (LaserMessageConsumer consumer : this.consumer) {
      consumer.write(b5mEvent);
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Example #5
Source File: StreamToActionBusCallbackTest.java From Decision with Apache License 2.0

@Before
public void setUp() throws Exception {
  siddhiManager = new StreamingSiddhiConfiguration().siddhiManager();
  siddhiManager.defineStream(StreamsHelper.STREAM_DEFINITION);
  metadataService = new StreamMetadataService(siddhiManager);
  javaToSiddhiSerializer = new JavaToSiddhiSerializer(metadataService);
  javaToAvroSerializer = new JavaToAvroSerializer(new SpecificDatumReader(InsertMessage.getClassSchema()));

  Set<StreamAction> activeActions = new ListOrderedSet();
  activeActions.add(StreamAction.LISTEN);

  producer = Mockito.mock(Producer.class);
  avroProducer = Mockito.mock(Producer.class);
  doNothing().when(producer).send(Matchers.<List<KeyedMessage<String, String>>>any());

  cbk = new StreamToActionBusCallback(activeActions, streamName, avroProducer,
      javaToSiddhiSerializer, javaToAvroSerializer);
}
Example #6
Source File: AvroInputFormat.java From stratosphere with Apache License 2.0

@Override
public void open(FileInputSplit split) throws IOException {
  super.open(split);

  this.wrapper = InstantiationUtil.instantiate(avroWrapperTypeClass, AvroBaseValue.class);

  // Generated classes get the specific reader; anything else falls back to reflection.
  DatumReader<E> datumReader;
  if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)) {
    datumReader = new SpecificDatumReader<E>(avroValueType);
  } else {
    datumReader = new ReflectDatumReader<E>(avroValueType);
  }

  LOG.info("Opening split " + split);

  SeekableInput in = new FSDataInputStreamWrapper(stream, (int) split.getLength());
  dataFileReader = DataFileReader.openReader(in, datumReader);
  dataFileReader.sync(split.getStart());
  reuseAvroValue = null;
}
Example #7
Source File: AvroInputFormat.java From flink with Apache License 2.0

private DataFileReader<E> initReader(FileInputSplit split) throws IOException {
  DatumReader<E> datumReader;
  if (org.apache.avro.generic.GenericRecord.class == avroValueType) {
    datumReader = new GenericDatumReader<E>();
  } else {
    datumReader = org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)
        ? new SpecificDatumReader<E>(avroValueType)
        : new ReflectDatumReader<E>(avroValueType);
  }

  if (LOG.isInfoEnabled()) {
    LOG.info("Opening split {}", split);
  }

  SeekableInput in = new FSDataInputStreamWrapper(stream,
      split.getPath().getFileSystem().getFileStatus(split.getPath()).getLen());
  DataFileReader<E> dataFileReader = (DataFileReader) DataFileReader.openReader(in, datumReader);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Loaded SCHEMA: {}", dataFileReader.getSchema());
  }

  end = split.getStart() + split.getLength();
  recordsReadSinceLastSync = 0;
  return dataFileReader;
}
Example #8
Source File: WikipediaArticleReaderTest.java From json-wikipedia with Apache License 2.0

@Test
public void testAvroParsing() throws IOException, SAXException {
  File input = new File("src/test/resources/en/mercedes.xml");
  final File output = File.createTempFile("jsonwikipedia-mercedes", ".avro");
  output.deleteOnExit();

  WikipediaArticleReader wap = new WikipediaArticleReader(input, output, "en");
  wap.start();

  // reading the encoded avro and checking that it is correct
  DatumReader<Article> userDatumReader = new SpecificDatumReader<>(Article.getClassSchema());
  DataFileReader<Article> dataFileReader = new DataFileReader<>(output, userDatumReader);
  assertTrue(dataFileReader.hasNext());
  Article article = new Article();
  dataFileReader.next(article);
  assertEquals("Mercedes-Benz", article.getTitle());
  assertEquals("Mercedes-Benz", article.getWikiTitle());
}
Example #9
Source File: KafkaAvroJobStatusMonitor.java From incubator-gobblin with Apache License 2.0

public KafkaAvroJobStatusMonitor(String topic, Config config, int numThreads)
    throws IOException, ReflectiveOperationException {
  super(topic, config, numThreads);
  if (ConfigUtils.getBoolean(config, ConfigurationKeys.METRICS_REPORTING_KAFKA_USE_SCHEMA_REGISTRY, false)) {
    KafkaAvroSchemaRegistry schemaRegistry = (KafkaAvroSchemaRegistry) new KafkaAvroSchemaRegistryFactory()
        .create(ConfigUtils.configToProperties(config));
    this.schemaVersionWriter = new SchemaRegistryVersionWriter(schemaRegistry, topic, GobblinTrackingEvent.SCHEMA$);
  } else {
    this.schemaVersionWriter = new FixedSchemaVersionWriter();
  }
  this.decoder = ThreadLocal.withInitial(() -> {
    InputStream dummyInputStream = new ByteArrayInputStream(new byte[0]);
    return DecoderFactory.get().binaryDecoder(dummyInputStream, null);
  });
  this.reader = ThreadLocal.withInitial(() -> new SpecificDatumReader<>(GobblinTrackingEvent.SCHEMA$));
}
Example #10
Source File: AvroDeserializationSchema.java From Flink-CEPplus with Apache License 2.0

void checkAvroInitialized() {
  if (datumReader != null) {
    return;
  }

  ClassLoader cl = Thread.currentThread().getContextClassLoader();
  if (SpecificRecord.class.isAssignableFrom(recordClazz)) {
    SpecificData specificData = new SpecificData(cl);
    this.datumReader = new SpecificDatumReader<>(specificData);
    this.reader = specificData.getSchema(recordClazz);
  } else {
    this.reader = new Schema.Parser().parse(schemaString);
    GenericData genericData = new GenericData(cl);
    this.datumReader = new GenericDatumReader<>(null, this.reader, genericData);
  }

  this.inputStream = new MutableByteArrayInputStream();
  this.decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
}
Example #11
Source File: DoctorKafkaActionsServlet.java From doctorkafka with Apache License 2.0

@Override
public void renderJSON(PrintWriter writer, Map<String, String> params) {
  JsonArray json = new JsonArray();
  for (ConsumerRecord<byte[], byte[]> record : Lists.reverse(retrieveActionReportMessages())) {
    try {
      JsonObject jsonRecord = new JsonObject();
      BinaryDecoder binaryDecoder = avroDecoderFactory.binaryDecoder(record.value(), null);
      SpecificDatumReader<OperatorAction> reader = new SpecificDatumReader<>(operatorActionSchema);

      OperatorAction result = new OperatorAction();
      reader.read(result, binaryDecoder);

      jsonRecord.add("date", gson.toJsonTree(new Date(result.getTimestamp())));
      jsonRecord.add("clusterName", gson.toJsonTree(result.getClusterName()));
      jsonRecord.add("description", gson.toJsonTree(result.getDescription()));
      json.add(jsonRecord);
    } catch (Exception e) {
      LOG.info("Failed to decode a message", e);
    }
  }
  writer.print(json);
}
Example #12
Source File: AvroJobSpecDeserializer.java From incubator-gobblin with Apache License 2.0

@Override
public void configure(Map<String, ?> configs, boolean isKey) {
  InputStream dummyInputStream = new ByteArrayInputStream(new byte[0]);
  _decoder = DecoderFactory.get().binaryDecoder(dummyInputStream, null);
  _reader = new SpecificDatumReader<AvroJobSpec>(AvroJobSpec.SCHEMA$);
  _versionWriter = new FixedSchemaVersionWriter();
}
Example #13
Source File: EchoExecutor.java From Unified-Log-Processing with Apache License 2.0

public void execute(String command) {
  InputStream is = new ByteArrayInputStream(command.getBytes());
  DataInputStream din = new DataInputStream(is);
  try {
    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, din);
    DatumReader<Alert> reader = new SpecificDatumReader<Alert>(schema);
    Alert alert = reader.read(null, decoder);                           // c
    System.out.println("Alert " + alert.recipient.name +
        " about " + alert.notification.summary);                        // d
  } catch (IOException | AvroTypeException e) {
    System.out.println("Error executing command:" + e.getMessage());
  }
}
Example #14
Source File: AvroHttpSerializer.java From reef with Apache License 2.0

/**
 * Convert a file to AvroHttpRequest.
 * @param file
 * @return
 * @throws IOException
 */
public AvroHttpRequest fromFile(final File file) throws IOException {
  final AvroHttpRequest avrohttpRequest;
  try (DataFileReader<AvroHttpRequest> dataFileReader =
           new DataFileReader<>(file, new SpecificDatumReader<>(AvroHttpRequest.class))) {
    avrohttpRequest = dataFileReader.next();
  }
  return avrohttpRequest;
}
Example #15
Source File: AvroRecordInputFormatTest.java From flink with Apache License 2.0

/**
 * This test validates proper serialization with specific (generated POJO) types.
 */
@Test
public void testDeserializeToSpecificType() throws IOException {
  DatumReader<User> datumReader = new SpecificDatumReader<>(userSchema);

  try (FileReader<User> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
    User rec = dataFileReader.next();

    // check if record has been read correctly
    assertNotNull(rec);
    assertEquals("name not equal", TEST_NAME, rec.get("name").toString());
    assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), rec.get("type_enum").toString());

    // now serialize it with our framework:
    ExecutionConfig ec = new ExecutionConfig();
    TypeInformation<User> te = TypeExtractor.createTypeInfo(User.class);
    assertEquals(AvroTypeInfo.class, te.getClass());

    TypeSerializer<User> tser = te.createSerializer(ec);

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
      tser.serialize(rec, outView);
    }

    User newRec;
    try (DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(
        new ByteArrayInputStream(out.toByteArray()))) {
      newRec = tser.deserialize(inView);
    }

    // check if it is still the same
    assertNotNull(newRec);
    assertEquals("name not equal", TEST_NAME, newRec.getName().toString());
    assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), newRec.getTypeEnum().toString());
  }
}
Example #16
Source File: EventUtils.java From incubator-gobblin with Apache License 2.0

/**
 * Parses a {@link org.apache.gobblin.metrics.GobblinTrackingEvent} from a byte array representing a json input.
 * @param reuse GobblinTrackingEvent to reuse.
 * @param bytes Input bytes.
 * @return GobblinTrackingEvent.
 * @throws java.io.IOException
 */
public static synchronized GobblinTrackingEvent deserializeReportFromJson(GobblinTrackingEvent reuse, byte[] bytes)
    throws IOException {
  if (!reader.isPresent()) {
    reader = Optional.of(new SpecificDatumReader<>(GobblinTrackingEvent.class));
  }

  Closer closer = Closer.create();
  try {
    DataInputStream inputStream = closer.register(new DataInputStream(new ByteArrayInputStream(bytes)));

    // Check version byte
    int versionNumber = inputStream.readInt();
    if (versionNumber != SCHEMA_VERSION) {
      throw new IOException(String.format("MetricReport schema version not recognized. Found version %d, expected %d.",
          versionNumber, SCHEMA_VERSION));
    }

    // Decode the rest
    Decoder decoder = DecoderFactory.get().jsonDecoder(GobblinTrackingEvent.SCHEMA$, inputStream);
    return reader.get().read(reuse, decoder);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
Example #17
Source File: HBaseScanAvroStock.java From hiped2 with Apache License 2.0

public AvroStockReader() {
  reader = new SpecificDatumReader<Stock>(Stock.class);

  // Print where the Avro classes were loaded from, to help debug classpath issues.
  CodeSource src = SpecificDatumReader.class.getProtectionDomain().getCodeSource();
  if (src != null) {
    URL jar = src.getLocation();
    System.out.println("Loaded from " + jar);
  }
}
Example #18
Source File: TypeUtils.java From geowave with Apache License 2.0

private static <T> T deserialize(final T avroObject, final byte[] avroData,
    final Class<T> avroClass, final Schema avroSchema) throws IOException {
  final BinaryDecoder decoder = df.binaryDecoder(avroData, null);

  // Cache one reader per Avro class; readers are relatively expensive to construct.
  if (!readers.containsKey(avroClass.toString())) {
    readers.put(avroClass.toString(), new SpecificDatumReader(avroSchema));
  }
  final SpecificDatumReader<T> reader = readers.get(avroClass.toString());
  return reader.read(avroObject, decoder);
}
Example #19
Source File: AvroCodec.java From schema-evolution-samples with Apache License 2.0

private DatumReader getDatumReader(Class<?> type, Schema writer) {
  DatumReader reader = null;
  if (SpecificRecord.class.isAssignableFrom(type)) {
    reader = new SpecificDatumReader<>(writer, getReaderSchema(writer));
  } else if (GenericRecord.class.isAssignableFrom(type)) {
    reader = new GenericDatumReader<>(writer, getReaderSchema(writer));
  } else {
    reader = new ReflectDatumReader<>(writer, getReaderSchema(writer));
  }
  return reader;
}
Example #20
Source File: AbstractKafkaAvroDeserializer.java From MongoDb-Sink-Connector with Apache License 2.0

private DatumReader getDatumReader(Schema writerSchema, Schema readerSchema) {
  // do not use SpecificDatumReader if writerSchema is a primitive
  boolean writerSchemaIsPrimitive = getPrimitiveSchemas().values().contains(writerSchema);
  if (useSpecificAvroReader && !writerSchemaIsPrimitive) {
    if (readerSchema == null) {
      readerSchema = getReaderSchema(writerSchema);
    }
    return new SpecificDatumReader(writerSchema, readerSchema);
  } else {
    if (readerSchema == null) {
      return new GenericDatumReader(writerSchema);
    }
    return new GenericDatumReader(writerSchema, readerSchema);
  }
}
Example #21
Source File: DefaultAvroSerDesHandler.java From registry with Apache License 2.0

private DatumReader getDatumReader(Schema writerSchema, Schema readerSchema, boolean useSpecificAvroReader) {
  if (useSpecificAvroReader) {
    if (readerSchema == null) {
      readerSchema = this.getReaderSchema(writerSchema);
    }
    return new SpecificDatumReader(writerSchema, readerSchema);
  } else {
    return readerSchema == null
        ? new GenericDatumReader(writerSchema)
        : new GenericDatumReader(writerSchema, readerSchema);
  }
}
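Several examples, including the one above, pass two schemas to SpecificDatumReader: the writer's schema (how the bytes were actually encoded) and the reader's schema (what the generated class expects today). Avro resolves the two, which is what makes schema evolution work. A minimal sketch of that pattern, assuming a hypothetical generated class User whose current schema may differ from the writer's:

import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;

public class EvolvedReadSketch {
  public static User decode(byte[] payload, Schema writerSchema) throws IOException {
    // Resolve the schema the bytes were written with against User's current schema.
    SpecificDatumReader<User> reader =
        new SpecificDatumReader<>(writerSchema, User.getClassSchema());
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
    return reader.read(null, decoder);
  }
}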
Example #22
Source File: AvroHttpSerializer.java From reef with Apache License 2.0

/**
 * Convert bytes to AvroHttpRequest.
 */
public AvroHttpRequest fromBytes(final byte[] theBytes) {
  try {
    final BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(theBytes, null);
    final SpecificDatumReader<AvroHttpRequest> reader = new SpecificDatumReader<>(AvroHttpRequest.class);
    return reader.read(null, decoder);
  } catch (final IOException e) {
    throw new RuntimeException(e);
  }
}
Example #23
Source File: AvroSpecificSerialization.java From big-c with Apache License 2.0

@InterfaceAudience.Private
@Override
public DatumReader getReader(Class<SpecificRecord> clazz) {
  try {
    // Instantiate the record once just to obtain its schema.
    return new SpecificDatumReader(clazz.newInstance().getSchema());
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Example #24
Source File: EventReader.java From big-c with Apache License 2.0

/**
 * Create a new Event Reader
 * @param in
 * @throws IOException
 */
@SuppressWarnings("deprecation")
public EventReader(DataInputStream in) throws IOException {
  this.in = in;
  this.version = in.readLine();

  if (!EventWriter.VERSION.equals(version)) {
    throw new IOException("Incompatible event log version: " + version);
  }

  Schema myschema = new SpecificData(Event.class.getClassLoader()).getSchema(Event.class);
  this.schema = Schema.parse(in.readLine());
  this.reader = new SpecificDatumReader(schema, myschema);
  this.decoder = DecoderFactory.get().jsonDecoder(schema, in);
}
Example #25
Source File: SimpleAVROSchema.java From jMetalSP with MIT License

private void ensureInitialized() {
  if (reader == null) {
    if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroType)) {
      reader = new SpecificDatumReader<T>(avroType);
    } else {
      reader = new ReflectDatumReader<T>(avroType);
    }
  }
}
Example #26
Source File: AvroDeSerealizer.java From tutorials with MIT License

public AvroHttpRequest deSerealizeAvroHttpRequestJSON(byte[] data) {
  DatumReader<AvroHttpRequest> reader = new SpecificDatumReader<>(AvroHttpRequest.class);
  Decoder decoder = null;
  try {
    decoder = DecoderFactory.get().jsonDecoder(AvroHttpRequest.getClassSchema(), new String(data));
    return reader.read(null, decoder);
  } catch (IOException e) {
    logger.error("Deserialization error: " + e.getMessage());
  }
  return null;
}