Java Code Examples for org.apache.avro.Schema#toString()
The following examples show how to use org.apache.avro.Schema#toString().
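Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below; the record name and fields are invented for illustration) of what Schema#toString() and Schema#toString(boolean pretty) return: the schema serialized as JSON, either compact or pretty-printed, which round-trips through Schema.Parser.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class SchemaToStringSketch {
    public static void main(String[] args) {
        // Build a small record schema; the name and fields are illustrative only.
        Schema schema = SchemaBuilder.record("User").namespace("example.avro")
                .fields()
                .requiredString("name")
                .optionalInt("favorite_number")
                .endRecord();

        String compact = schema.toString();     // single-line JSON
        String pretty = schema.toString(true);  // pretty-printed JSON, handy for files and error messages

        System.out.println(compact);
        System.out.println(pretty);

        // The JSON form round-trips: parsing it yields an equal Schema object.
        Schema reparsed = new Schema.Parser().parse(compact);
        System.out.println(schema.equals(reparsed)); // true
    }
}

Several of the examples below rely on exactly this round-trip property (re-parsing or caching the JSON text), while others use toString(true) purely for readable output in files and exception messages.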
Example 1
Source File: AvroCompositeSchemasTest.java From registry with Apache License 2.0 | 7 votes |
private void doTestSchemaResolution(String givenSchemaLocation, String expectedSchemaLocation) throws IOException {
    AvroSchemaResolver avroSchemaResolver = new AvroSchemaResolver(null);

    Schema schema = new Schema.Parser().parse(getResourceText(givenSchemaLocation));
    LOG.info("schema = %s", schema);

    Schema effectiveSchema = avroSchemaResolver.handleUnionFieldsWithNull(schema, new HashSet<>());
    LOG.info("effectiveSchema = %s", effectiveSchema);
    String returnedSchemaText = effectiveSchema.toString();
    Assert.assertEquals(getResourceText(expectedSchemaLocation).replace(" ", ""),
                        returnedSchemaText.replace(" ", ""));

    // double check whether the effective schema is semantically right parsing
    Schema.Parser parser = new Schema.Parser();
    Schema parsedReturnedSchema = parser.parse(returnedSchemaText);
    Assert.assertEquals(effectiveSchema, parsedReturnedSchema);
}
Example 2
Source File: Conversions.java From kite with Apache License 2.0 | 6 votes |
public static Object convert(Object obj, Schema schema) {
    Class<?> returnType = SchemaUtil.getClassForType(schema.getType());
    if (returnType != null) {
        return convert(obj, returnType);
    }

    switch (schema.getType()) {
        case UNION:
            for (Schema possibleSchema : schema.getTypes()) {
                try {
                    return convert(obj, SchemaUtil.getClassForType(possibleSchema.getType()));
                } catch (ClassCastException e) {
                    // didn't work, try the next one
                }
            }
            break;
    }

    throw new ClassCastException(
        "Cannot convert using schema: " + schema.toString(true));
}
Example 3
Source File: ResolveAvroDependenciesTask.java From gradle-avro-plugin with Apache License 2.0 | 6 votes |
private int processSchemaFiles() {
    Set<File> inputFiles = filterSources(new FileExtensionSpec(SCHEMA_EXTENSION)).getFiles();
    ProcessingState processingState = resolver.resolve(inputFiles);
    for (Schema schema : processingState.getSchemas()) {
        try {
            String outputPath = schema.getNamespace().replaceAll(Pattern.quote("."), "/")
                + "/" + schema.getName() + "." + SCHEMA_EXTENSION;
            File outputFile = new File(getOutputDir().get().getAsFile(), outputPath);
            String schemaJson = schema.toString(true);
            FileUtils.writeJsonFile(outputFile, schemaJson);
            getLogger().debug("Wrote {}", outputFile.getPath());
        } catch (IOException ex) {
            throw new GradleException(
                String.format("Failed to write resolved schema definition for %s", schema.getFullName()), ex);
        }
    }
    return processingState.getProcessedTotal();
}
Example 4
Source File: AvroCompositeSchemasTest.java From registry with Apache License 2.0 | 6 votes |
@Test
public void testUnionSchemasPropRetention() throws Exception {
    AvroSchemaResolver avroSchemaResolver = new AvroSchemaResolver(null);

    Schema schema = new Schema.Parser().parse(getResourceText("/avro/composites/unions-with-props.avsc"));
    LOG.info("schema = %s", schema);

    Schema effectiveSchema = avroSchemaResolver.handleUnionFieldsWithNull(schema, new HashSet<>());
    LOG.info("effectiveSchema = %s", effectiveSchema);
    String returnedSchemaText = effectiveSchema.toString();

    Assert.assertEquals("foo", effectiveSchema.getField("name").getProp("someProp"));
    Assert.assertEquals("bar", effectiveSchema.getField("address").getProp("otherProp"));
    Assert.assertEquals("baz", effectiveSchema.getField("address").schema().getField("pincode").getProp("anotherProp"));
    Assert.assertEquals("quux", effectiveSchema.getField("secondaryAddress").getProp("moreProps"));

    // double check whether the effective schema is semantically right parsing
    Schema.Parser parser = new Schema.Parser();
    Schema parsedReturnedSchema = parser.parse(returnedSchemaText);
    Assert.assertEquals(effectiveSchema, parsedReturnedSchema);
}
Example 5
Source File: Compatibility.java From kite with Apache License 2.0 | 6 votes |
/**
 * Checks that the {@code existing} {@link DatasetDescriptor} is compatible
 * with {@code test}.
 *
 * @param existing the current {@code DatasetDescriptor} for a dataset
 * @param test a new {@code DatasetDescriptor} for the same dataset
 */
public static void checkCompatible(DatasetDescriptor existing, DatasetDescriptor test) {
    checkNotChanged("format", existing.getFormat(), test.getFormat());

    checkNotChanged("partitioning", existing.isPartitioned(), test.isPartitioned());

    if (existing.isPartitioned()) {
        checkStrategyUpdate(
            existing.getPartitionStrategy(),
            test.getPartitionStrategy(),
            test.getSchema());
    }

    // check can read records written with old schema using new schema
    Schema oldSchema = existing.getSchema();
    Schema testSchema = test.getSchema();
    if (!SchemaValidationUtil.canRead(oldSchema, testSchema)) {
        throw new IncompatibleSchemaException("Schema cannot read data "
            + "written using existing schema. Schema: " + testSchema.toString(true)
            + "\nExisting schema: " + oldSchema.toString(true));
    }
}
Example 6
Source File: AvroSerializationSchema.java From flink with Apache License 2.0 | 5 votes |
/**
 * Creates an Avro serialization schema.
 *
 * @param recordClazz class to serialize. Should be one of:
 *                    {@link org.apache.avro.specific.SpecificRecord},
 *                    {@link org.apache.avro.generic.GenericRecord}.
 * @param schema writer Avro schema. Should be provided if recordClazz is
 *               {@link GenericRecord}
 */
protected AvroSerializationSchema(Class<T> recordClazz, @Nullable Schema schema) {
    Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
    this.recordClazz = recordClazz;
    this.schema = schema;
    if (schema != null) {
        this.schemaString = schema.toString();
    } else {
        this.schemaString = null;
    }
}
Example 7
Source File: PartitionData.java From iceberg with Apache License 2.0 | 5 votes |
/**
 * Used by Avro reflection to instantiate this class when reading manifest files.
 */
PartitionData(Schema schema) {
    this.partitionType = AvroSchemaUtil.convert(schema).asNestedType().asStructType();
    this.size = partitionType.fields().size();
    this.data = new Object[size];
    this.stringSchema = schema.toString();
    this.schema = schema;
}
Example 8
Source File: TestConvertAvroToJSON.java From localization_nifi with Apache License 2.0 | 5 votes |
@Test
public void testSingleSchemalessAvroMessage_noContainer() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ConvertAvroToJSON());
    runner.setProperty(ConvertAvroToJSON.CONTAINER_OPTIONS, ConvertAvroToJSON.CONTAINER_NONE);
    Schema schema = new Schema.Parser().parse(new File("src/test/resources/user.avsc"));
    String stringSchema = schema.toString();
    runner.setProperty(ConvertAvroToJSON.SCHEMA, stringSchema);

    final GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("favorite_number", 256);

    final ByteArrayOutputStream out1 = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out1, null);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(user1, encoder);
    encoder.flush();
    out1.flush();
    byte[] test = out1.toByteArray();
    runner.enqueue(test);

    runner.run();

    runner.assertAllFlowFilesTransferred(ConvertAvroToJSON.REL_SUCCESS, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertAvroToJSON.REL_SUCCESS).get(0);
    out.assertContentEquals("{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": null}");
}
Example 9
Source File: TestConvertAvroToJSON.java From nifi with Apache License 2.0 | 5 votes |
@Test
public void testSingleSchemalessAvroMessage() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ConvertAvroToJSON());
    Schema schema = new Schema.Parser().parse(new File("src/test/resources/user.avsc"));
    String stringSchema = schema.toString();
    runner.setProperty(ConvertAvroToJSON.SCHEMA, stringSchema);

    final GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("favorite_number", 256);

    final ByteArrayOutputStream out1 = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out1, null);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(user1, encoder);
    encoder.flush();
    out1.flush();
    byte[] test = out1.toByteArray();
    runner.enqueue(test);

    runner.run();

    runner.assertAllFlowFilesTransferred(ConvertAvroToJSON.REL_SUCCESS, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertAvroToJSON.REL_SUCCESS).get(0);
    out.assertContentEquals("{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": null}");
}
Example 10
Source File: JsonUtils.java From localization_nifi with Apache License 2.0 | 5 votes |
/**
 * Reads provided {@link InputStream} as JSON into Avro
 * {@link GenericRecord} applying provided {@link Schema} returning the
 * resulting GenericRecord.
 */
public static GenericRecord read(InputStream jsonIs, Schema schema) {
    DataInputStream din = new DataInputStream(jsonIs);
    try {
        Decoder decoder = DecoderFactory.get().jsonDecoder(schema, din);
        DatumReader<GenericData.Record> reader = new GenericDatumReader<>(schema);
        return reader.read(null, decoder);
    } catch (Exception e) {
        throw new IllegalStateException("Failed to parse incoming Json input stream into Avro GenericRecord. "
            + "Possible reason: the value may not be a valid JSON or incompatible schema is provided. Schema was '"
            + schema.toString(true) + "'.", e);
    }
}
Example 11
Source File: AvroSchemaGenRelConverter.java From samza with Apache License 2.0 | 5 votes |
private Schema computePayloadSchema(String streamName, SamzaSqlRelMessage relMessage) {
    SamzaSqlRelRecord relRecord = relMessage.getSamzaSqlRelRecord();
    List<Schema.Field> keyFields = new ArrayList<>();
    List<String> fieldNames = relRecord.getFieldNames();
    List<Object> values = relRecord.getFieldValues();

    for (int index = 0; index < fieldNames.size(); index++) {
        if (fieldNames.get(index).equals(SamzaSqlRelMessage.KEY_NAME) || values.get(index) == null) {
            continue;
        }

        Object value = values.get(index);
        Schema avroType;
        if (value instanceof GenericData.Record) {
            avroType = ((GenericData.Record) value).getSchema();
        } else {
            avroType = ReflectData.get().getSchema(value.getClass());
        }
        keyFields.add(new Schema.Field(fieldNames.get(index), avroType, "", null));
    }

    Schema ks = Schema.createRecord(streamName, "", streamName + "_namespace", false);
    ks.setFields(keyFields);
    String schemaStr = ks.toString();
    Schema schema;
    // See whether we have a schema object corresponding to the schemaValue and reuse it.
    // CachedSchemaRegistryClient doesn't like if we recreate schema objects.
    if (schemas.containsKey(schemaStr)) {
        schema = schemas.get(schemaStr);
    } else {
        schema = Schema.parse(schemaStr);
        schemas.put(schemaStr, schema);
    }

    return schema;
}
Example 12
Source File: PartitionData.java From iceberg with Apache License 2.0 | 5 votes |
/**
 * Used by Avro reflection to instantiate this class when reading manifest files.
 */
public PartitionData(Schema schema) {
    this.partitionType = AvroSchemaUtil.convert(schema).asNestedType().asStructType();
    this.size = partitionType.fields().size();
    this.data = new Object[size];
    this.stringSchema = schema.toString();
    this.schema = schema;
}
Example 13
Source File: GenericAvroSerializer.java From jstorm with Apache License 2.0 | 4 votes |
@Override
public String getFingerprint(Schema schema) {
    return schema.toString();
}
Example 14
Source File: PartitionCollapsingIncrementalCountJob.java From datafu with Apache License 2.0 | 4 votes |
public TheMerger(Schema schema) {
    this.schema = schema;
    this.schemaString = schema.toString();
}
Example 15
Source File: AbstractAvroSnapshotSerializer.java From registry with Apache License 2.0 | 4 votes |
/**
 * @param input avro object
 * @return textual representation of the schema of the given {@code input} avro object
 */
@Override
protected String getSchemaText(Object input) {
    Schema schema = AvroUtils.computeSchema(input);
    return schema.toString();
}
Example 16
Source File: SerializationFrameworkAllBenchmarks.java From flink-benchmarks with Apache License 2.0 | 4 votes |
public AvroGenericRecordSource(int numEvents, int numKeys, Schema schema) {
    super(numEvents, numKeys);
    this.producedType = new GenericRecordAvroTypeInfo(schema);
    this.myPojoSchema = schema;
    this.schemaString = schema.toString();
}
Example 17
Source File: FixedFlowInputBoundedSource.java From components with Apache License 2.0 | 4 votes |
public FixedFlowInputBoundedSource withSchema(Schema schema) {
    this.schemaString = schema.toString();
    return this;
}
Example 18
Source File: GeneratorFunctions.java From components with Apache License 2.0 | 4 votes |
public EnumGenerator(Schema enumSchema) {
    jsonSchema = enumSchema.toString();
    schema = enumSchema;
}
Example 19
Source File: AvroKryoSerializerUtils.java From flink with Apache License 2.0 | 4 votes |
@Override
public void write(Kryo kryo, Output output, Schema object) {
    String schemaAsString = object.toString(false);
    output.writeString(schemaAsString);
}
Example 20
Source File: AvroKryoSerializerUtils.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@Override
public void write(Kryo kryo, Output output, Schema object) {
    String schemaAsString = object.toString(false);
    output.writeString(schemaAsString);
}