org.apache.avro.specific.SpecificRecordBase Java Examples
The following examples show how to use
org.apache.avro.specific.SpecificRecordBase.
Each example notes the source file, the project it was taken from, and the project's license.
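Background: classes extending SpecificRecordBase are not written by hand; they are generated from an .avsc schema by avro-tools or the avro-maven-plugin. A generated class carries its own schema and, since Avro 1.8, static encode/decode helpers, which several examples below rely on. A minimal round-trip sketch, assuming a hypothetical generated class User with a single name field:

// User is a stand-in for any Avro-generated class; newBuilder(), toByteBuffer()
// and fromByteBuffer() are emitted by the Avro code generator (1.8+).
User user = User.newBuilder().setName("alice").build();
java.nio.ByteBuffer buf = user.toByteBuffer();   // single-record binary encoding
User decoded = User.fromByteBuffer(buf);         // static decoder on the generated class
assert user.equals(decoded);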
Example #1
Source File: ObjectEncodingHandlerAvroImpl.java (from alibaba-rsocket-broker, Apache License 2.0)
@Override
@NotNull
public ByteBuf encodingResult(@Nullable Object result) throws EncodingException {
    if (result instanceof SpecificRecordBase) {
        Class<?> objectClass = result.getClass();
        Method toByteBufferMethod = toByteBufferMethodStore.get(objectClass);
        if (toByteBufferMethod != null) {
            try {
                ByteBuffer byteBuffer = (ByteBuffer) toByteBufferMethod.invoke(result);
                return Unpooled.wrappedBuffer(byteBuffer);
            } catch (Exception e) {
                throw new EncodingException(RsocketErrorCode.message("RST-700500", result.toString(), "ByteBuf"), e);
            }
        }
    }
    return EMPTY_BUFFER;
}
Example #2
Source File: ObjectEncodingHandlerAvroImpl.java (from alibaba-rsocket-broker, Apache License 2.0)
@Override
@Nullable
public Object decodeResult(ByteBuf data, @Nullable Class<?> targetClass) throws EncodingException {
    if (data.readableBytes() > 0 && targetClass != null) {
        if (SpecificRecordBase.class.equals(targetClass.getSuperclass())) {
            Method fromByteBufferMethod = fromByteBufferMethodStore.get(targetClass);
            if (fromByteBufferMethod != null) {
                try {
                    return fromByteBufferMethod.invoke(null, data.nioBuffer());
                } catch (Exception e) {
                    throw new EncodingException(RsocketErrorCode.message("RST-700501", "bytebuf", targetClass.getName()), e);
                }
            }
        }
    }
    return null;
}
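Examples #1 and #2 avoid per-call reflection lookups by caching the generated toByteBuffer/fromByteBuffer Methods per class. A minimal sketch of how such caches might be populated (the store fields mirror the snippets above; the registration helper is an assumption, not the project's actual code):

// Hypothetical registration helper for the method caches used above.
private final Map<Class<?>, Method> toByteBufferMethodStore = new ConcurrentHashMap<>();
private final Map<Class<?>, Method> fromByteBufferMethodStore = new ConcurrentHashMap<>();

private void registerAvroClass(Class<?> avroClass) throws NoSuchMethodException {
    // instance method: ByteBuffer toByteBuffer()
    toByteBufferMethodStore.put(avroClass, avroClass.getMethod("toByteBuffer"));
    // static method: T fromByteBuffer(ByteBuffer)
    fromByteBufferMethodStore.put(avroClass, avroClass.getMethod("fromByteBuffer", ByteBuffer.class));
}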
Example #3
Source File: ProtocolSerializer.java (from reef, Apache License 2.0)
/**
 * Finds all of the messages in the specified package and calls register.
 * @param messagePackage A string which contains the full name of the
 *                       package containing the protocol messages.
 */
@Inject
private ProtocolSerializer(
        @Parameter(ProtocolSerializerNamespace.class) final String messagePackage) {
    // Build a list of the message reflection classes.
    final ScanResult scanResult = new FastClasspathScanner(messagePackage).scan();
    final List<String> scanNames = scanResult.getNamesOfSubclassesOf(SpecificRecordBase.class);
    final List<Class<?>> messageClasses = scanResult.classNamesToClassRefs(scanNames);

    // Add the header message from the org.apache.reef.wake.avro.message package.
    messageClasses.add(Header.class);

    // Register all of the messages in the specified package.
    for (final Class<?> cls : messageClasses) {
        this.register(cls);
    }
}
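Note that FastClasspathScanner has since been renamed ClassGraph, and the API used above was dropped. A sketch of the equivalent subtype scan with ClassGraph 4.x (method names per that library, not REEF's code):

// Equivalent scan with ClassGraph (FastClasspathScanner's successor); ScanResult is AutoCloseable.
try (ScanResult scan = new ClassGraph().enableClassInfo().acceptPackages(messagePackage).scan()) {
    List<Class<?>> messageClasses = scan.getSubclasses(SpecificRecordBase.class.getName()).loadClasses();
    // register(...) each class as in the constructor above
}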
Example #4
Source File: TimelineMetadataUtils.java (from hudi, Apache License 2.0)
public static <T extends SpecificRecordBase> Option<byte[]> serializeAvroMetadata(T metadata, Class<T> clazz)
        throws IOException {
    DatumWriter<T> datumWriter = new SpecificDatumWriter<>(clazz);
    DataFileWriter<T> fileWriter = new DataFileWriter<>(datumWriter);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    fileWriter.create(metadata.getSchema(), baos);
    fileWriter.append(metadata);
    fileWriter.flush();
    return Option.of(baos.toByteArray());
}
Example #5
Source File: TimelineMetadataUtils.java (from hudi, Apache License 2.0)
public static <T extends SpecificRecordBase> T deserializeAvroMetadata(byte[] bytes, Class<T> clazz)
        throws IOException {
    DatumReader<T> reader = new SpecificDatumReader<>(clazz);
    FileReader<T> fileReader = DataFileReader.openReader(new SeekableByteArrayInput(bytes), reader);
    ValidationUtils.checkArgument(fileReader.hasNext(), "Could not deserialize metadata of type " + clazz);
    return fileReader.next();
}
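Examples #4 and #5 form a byte-array round trip through the Avro object-container format; the serialized bytes include the schema header, which is how the reader side can open and validate them. A usage sketch, where MyMetadata and buildMetadata() are placeholders for a generated record class and whatever produces it:

// Hypothetical round trip through the two helpers above.
MyMetadata metadata = buildMetadata();
byte[] bytes = TimelineMetadataUtils.serializeAvroMetadata(metadata, MyMetadata.class).get();
MyMetadata restored = TimelineMetadataUtils.deserializeAvroMetadata(bytes, MyMetadata.class);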
Example #6
Source File: AvroWriters.java (from flink, Apache License 2.0)
/**
 * Creates an {@link AvroWriterFactory} for an Avro specific type. The Avro writers
 * will use the schema of that specific type to build and write the records.
 *
 * @param type The class of the type to write.
 */
public static <T extends SpecificRecordBase> AvroWriterFactory<T> forSpecificRecord(Class<T> type) {
    String schemaString = SpecificData.get().getSchema(type).toString();
    AvroBuilder<T> builder = (out) -> createAvroDataFileWriter(schemaString, SpecificDatumWriter::new, out);
    return new AvroWriterFactory<>(builder);
}
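An AvroWriterFactory is a BulkWriter.Factory, so it can be handed straight to Flink's StreamingFileSink. A minimal wiring sketch, assuming a generated User class and a DataStream<User> named users (both placeholders):

// Hypothetical sink wiring for the factory created above.
StreamingFileSink<User> sink = StreamingFileSink
        .forBulkFormat(new Path("hdfs:///output/avro"), AvroWriters.forSpecificRecord(User.class))
        .build();
users.addSink(sink);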
Example #7
Source File: TestAvroConsumerConfluent.java (from Flink-CEPplus, Apache License 2.0)
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
                "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
                "--bootstrap.servers <kafka brokers> " +
                "--zookeeper.connect <zk quorum> " +
                "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }
    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataStreamSource<User> input = env
            .addSource(
                    new FlinkKafkaConsumer010<>(
                            parameterTool.getRequired("input-topic"),
                            ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                            config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
            .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-topic"),
            new SimpleStringSchema(),
            config);

    mapToString.addSink(stringFlinkKafkaProducer010);
    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
Example #8
Source File: TestAvroConsumerConfluent.java (from flink, Apache License 2.0)
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
                "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
                "--bootstrap.servers <kafka brokers> " +
                "--zookeeper.connect <zk quorum> " +
                "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }
    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataStreamSource<User> input = env
            .addSource(
                    new FlinkKafkaConsumer010<>(
                            parameterTool.getRequired("input-topic"),
                            ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                            config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
            .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-topic"),
            new SimpleStringSchema(),
            config);

    mapToString.addSink(stringFlinkKafkaProducer010);
    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
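Both jobs above decode Kafka records straight into the generated User type by fetching the writer schema from the Confluent registry. The deserialization schema can also be constructed standalone; a sketch with a placeholder registry URL:

// Hypothetical standalone construction; the URL is a placeholder.
DeserializationSchema<User> deserializer =
        ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, "http://localhost:8081");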
Example #9
Source File: TestAvroConsumerConfluent.java (from flink, Apache License 2.0)
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
                "Usage: Kafka --input-topic <topic> --output-string-topic <topic> --output-avro-topic <topic> " +
                "--bootstrap.servers <kafka brokers> " +
                "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }
    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStreamSource<User> input = env
            .addSource(
                    new FlinkKafkaConsumer010<>(
                            parameterTool.getRequired("input-topic"),
                            ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                            config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
            .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-string-topic"),
            new SimpleStringSchema(),
            config);
    mapToString.addSink(stringFlinkKafkaProducer010);

    FlinkKafkaProducer010<User> avroFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-avro-topic"),
            ConfluentRegistryAvroSerializationSchema.forSpecific(User.class, parameterTool.getRequired("output-subject"), schemaRegistryUrl),
            config);
    input.addSink(avroFlinkKafkaProducer010);

    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
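Example #9 adds the write path: ConfluentRegistryAvroSerializationSchema.forSpecific takes the subject name under which the record schema is registered. A standalone sketch with placeholder subject and URL:

// Hypothetical standalone construction of the serializer used above.
SerializationSchema<User> serializer =
        ConfluentRegistryAvroSerializationSchema.forSpecific(User.class, "users-value", "http://localhost:8081");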
Example #10
Source File: ParquetAvroWriters.java (from Flink-CEPplus, Apache License 2.0)
/**
 * Creates a ParquetWriterFactory for an Avro specific type. The Parquet writers will use the
 * schema of that specific type to build and write the columnar data.
 *
 * @param type The class of the type to write.
 */
public static <T extends SpecificRecordBase> ParquetWriterFactory<T> forSpecificRecord(Class<T> type) {
    final String schemaString = SpecificData.get().getSchema(type).toString();
    final ParquetBuilder<T> builder = (out) -> createAvroParquetWriter(schemaString, SpecificData.get(), out);
    return new ParquetWriterFactory<>(builder);
}
Example #11
Source File: ParquetAvroWriters.java (from flink, Apache License 2.0)
/**
 * Creates a ParquetWriterFactory for an Avro specific type. The Parquet writers will use the
 * schema of that specific type to build and write the columnar data.
 *
 * @param type The class of the type to write.
 */
public static <T extends SpecificRecordBase> ParquetWriterFactory<T> forSpecificRecord(Class<T> type) {
    final String schemaString = SpecificData.get().getSchema(type).toString();
    final ParquetBuilder<T> builder = (out) -> createAvroParquetWriter(schemaString, SpecificData.get(), out);
    return new ParquetWriterFactory<>(builder);
}
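Like the Avro factory in Example #6, a ParquetWriterFactory is a BulkWriter.Factory and plugs into the same sink builder. A mirroring sketch (User, users, and the output path are placeholders):

// Hypothetical Parquet sink wiring, analogous to the Avro sink above.
StreamingFileSink<User> parquetSink = StreamingFileSink
        .forBulkFormat(new Path("hdfs:///output/parquet"), ParquetAvroWriters.forSpecificRecord(User.class))
        .build();
users.addSink(parquetSink);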