org.apache.avro.SchemaParseException Java Examples
The following examples show how to use org.apache.avro.SchemaParseException, the exception Avro throws when schema text is malformed or violates Avro's schema rules.
Each example is taken from an open source project; the source file, originating project, and license are noted above it.
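Before the project examples, here is a minimal standalone sketch of the pattern most of them share: parse schema text with Schema.Parser and catch SchemaParseException when the text is not a valid Avro schema. The class name and the invalid input are illustrative only.

import org.apache.avro.Schema;
import org.apache.avro.SchemaParseException;

public class SchemaParseExceptionSketch {
    public static void main(String[] args) {
        String invalidSchema = "{";  // truncated JSON document
        try {
            Schema schema = new Schema.Parser().parse(invalidSchema);
            System.out.println("Parsed schema: " + schema);
        } catch (SchemaParseException e) {
            // Thrown for malformed JSON as well as for structurally invalid schemas.
            System.out.println("Not a valid Avro schema: " + e.getMessage());
        }
    }
}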
Example #1
Source File: RestSchemaRegistryClient.java From nifi with Apache License 2.0 | 6 votes |
private RecordSchema createRecordSchema(final JsonNode schemaNode) throws SchemaNotFoundException {
    final String subject = schemaNode.get(SUBJECT_FIELD_NAME).asText();
    final int version = schemaNode.get(VERSION_FIELD_NAME).asInt();
    final int id = schemaNode.get(ID_FIELD_NAME).asInt();
    final String schemaText = schemaNode.get(SCHEMA_TEXT_FIELD_NAME).asText();

    try {
        final Schema avroSchema = new Schema.Parser().parse(schemaText);
        final SchemaIdentifier schemaId = SchemaIdentifier.builder().name(subject).id((long) id).version(version).build();
        return AvroTypeUtil.createSchema(avroSchema, schemaText, schemaId);
    } catch (final SchemaParseException spe) {
        throw new SchemaNotFoundException("Obtained Schema with id " + id + " and name " + subject
            + " from Confluent Schema Registry but the Schema Text that was returned is not a valid Avro Schema");
    }
}
Example #2
Source File: SchemasConfluentIT.java From apicurio-registry with Apache License 2.0 | 6 votes |
@Test
void createAndUpdateSchema() throws Exception {
    String artifactId = TestUtils.generateArtifactId();

    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo1\",\"type\":\"string\"}]}");
    createArtifactViaConfluentClient(schema, artifactId);

    Schema updatedSchema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"myrecord2\",\"fields\":[{\"name\":\"foo2\",\"type\":\"long\"}]}");
    createArtifactViaConfluentClient(updatedSchema, artifactId);

    assertThrows(SchemaParseException.class, () -> new Schema.Parser().parse("<type>record</type>\n<name>test</name>"));
    assertThat(confluentService.getAllVersions(artifactId), hasItems(1, 2));

    confluentService.deleteSubject(artifactId);
    waitForSubjectDeleted(artifactId);
}
Example #3
Source File: SchemaValidator.java From avro-util with BSD 2-Clause "Simplified" License | 6 votes |
/**
 * Validation logic taken out of class {@link Schema} with adaptations.
 * @param name name to be validated
 * @param suffix context appended to error messages
 * @throws SchemaParseException if name is invalid
 */
private static void validateName(String name, String suffix) {
    int length = name.length();
    if (length == 0) {
        throw new SchemaParseException("Empty name" + suffix);
    }
    char first = name.charAt(0);
    if (!(Character.isLetter(first) || first == '_')) {
        throw new SchemaParseException("Illegal initial character: " + name + suffix);
    }
    for (int i = 1; i < length; i++) {
        char c = name.charAt(i);
        if (!(Character.isLetterOrDigit(c) || c == '_')) {
            throw new SchemaParseException("Illegal character in: " + name + " ('" + c + "' at position " + i + ")" + suffix);
        }
    }
}
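The rules enforced above mirror Avro's own name validation: a name must be non-empty, start with a letter or underscore, and contain only letters, digits, and underscores. A small sketch (not part of the project above) showing the same rule surfacing through the standard parser when a record name contains a hyphen:

import org.apache.avro.Schema;
import org.apache.avro.SchemaParseException;

public class NameValidationSketch {
    public static void main(String[] args) {
        // '-' is neither a letter, a digit, nor '_', so the parser rejects the record name.
        String avsc = "{\"type\":\"record\",\"name\":\"bad-name\",\"fields\":[]}";
        try {
            new Schema.Parser().parse(avsc);
        } catch (SchemaParseException e) {
            System.out.println(e.getMessage());  // e.g. "Illegal character in: bad-name"
        }
    }
}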
Example #4
Source File: ParquetFileReaderTest.java From kafka-connect-fs with Apache License 2.0 | 6 votes |
@ParameterizedTest
@MethodSource("fileSystemConfigProvider")
public void readerWithUnparseableSchema(ReaderFsTestConfig fsConfig) {
    Map<String, Object> readerConfig = getReaderConfig();
    readerConfig.put(ParquetFileReader.FILE_READER_PARQUET_SCHEMA, "invalid schema");
    readerConfig.put(AgnosticFileReader.FILE_READER_AGNOSTIC_EXTENSIONS_PARQUET, getFileExtension());
    assertThrows(ConnectException.class,
        () -> getReader(FileSystem.newInstance(fsConfig.getFsUri(), new Configuration()),
            fsConfig.getDataFile(), readerConfig));
    assertThrows(SchemaParseException.class, () -> {
        try {
            getReader(FileSystem.newInstance(fsConfig.getFsUri(), new Configuration()),
                fsConfig.getDataFile(), readerConfig);
        } catch (Exception e) {
            throw e.getCause();
        }
    });
}
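The test above asserts twice on the same call: the reader wraps the SchemaParseException in a ConnectException, so the second assertThrows rethrows getCause() to check the wrapped exception directly. Below is a stripped-down sketch of that unwrap-and-rethrow pattern with JUnit 5; the parseWrapped helper and its RuntimeException wrapper are stand-ins for the file reader, not part of kafka-connect-fs.

import static org.junit.jupiter.api.Assertions.assertThrows;

import org.apache.avro.Schema;
import org.apache.avro.SchemaParseException;
import org.junit.jupiter.api.Test;

class UnwrapCauseSketchTest {

    // Stand-in for a component that wraps parse failures, like the file readers above.
    private void parseWrapped(String schemaText) {
        try {
            new Schema.Parser().parse(schemaText);
        } catch (SchemaParseException e) {
            throw new RuntimeException("could not create reader", e);
        }
    }

    @Test
    void reportsSchemaParseExceptionAsCause() {
        // The caller only sees the wrapper...
        assertThrows(RuntimeException.class, () -> parseWrapped("invalid schema"));
        // ...so the test rethrows the cause to assert on the original exception.
        assertThrows(SchemaParseException.class, () -> {
            try {
                parseWrapped("invalid schema");
            } catch (Exception e) {
                throw e.getCause();
            }
        });
    }
}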
Example #5
Source File: AvroProcessor.java From camel-quarkus with Apache License 2.0 | 6 votes |
@Record(ExecutionTime.STATIC_INIT)
@BuildStep
void recordAvroSchemasResigtration(BeanArchiveIndexBuildItem beanArchiveIndex,
        BeanContainerBuildItem beanContainer,
        AvroRecorder avroRecorder) {
    IndexView index = beanArchiveIndex.getIndex();
    for (AnnotationInstance annotation : index.getAnnotations(BUILD_TIME_AVRO_DATAFORMAT_ANNOTATION)) {
        String schemaResourceName = annotation.value().asString();
        FieldInfo fieldInfo = annotation.target().asField();
        String injectedFieldId = fieldInfo.declaringClass().name() + "." + fieldInfo.name();
        try (InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(schemaResourceName)) {
            Schema avroSchema = new Schema.Parser().parse(is);
            avroRecorder.recordAvroSchemaResigtration(beanContainer.getValue(), injectedFieldId, avroSchema);
            LOG.debug("Parsed the avro schema at build time from resource named " + schemaResourceName);
        } catch (SchemaParseException | IOException ex) {
            final String message = "An issue occured while parsing schema resource on field " + injectedFieldId;
            throw new RuntimeException(message, ex);
        }
    }
}
Example #6
Source File: AvroSchemaConverter.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Converts an Avro schema string into a nested row structure with deterministic field order and data
 * types that are compatible with Flink's Table & SQL API.
 *
 * @param avroSchemaString Avro schema definition string
 * @return type information matching the schema
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convertToTypeInfo(String avroSchemaString) {
    Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
    final Schema schema;
    try {
        schema = new Schema.Parser().parse(avroSchemaString);
    } catch (SchemaParseException e) {
        throw new IllegalArgumentException("Could not parse Avro schema string.", e);
    }
    return (TypeInformation<T>) convertToTypeInfo(schema);
}
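A short usage sketch for the converter above: a valid record schema yields row type information, while an unparseable string surfaces as an IllegalArgumentException whose cause is the original SchemaParseException. The import paths are those used by recent flink-avro modules and are an assumption here; they may differ between Flink versions.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.types.Row;

public class ConvertToTypeInfoSketch {
    public static void main(String[] args) {
        String avsc = "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"}]}";
        TypeInformation<Row> rowType = AvroSchemaConverter.convertToTypeInfo(avsc);
        System.out.println(rowType);

        try {
            AvroSchemaConverter.convertToTypeInfo("not a schema");
        } catch (IllegalArgumentException e) {
            // The original SchemaParseException is preserved as the cause.
            System.out.println("cause: " + e.getCause().getClass().getSimpleName());
        }
    }
}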
Example #7
Source File: AvroUtils.java From kareldb with Apache License 2.0 | 5 votes |
public static Schema parseSchema(File file) throws IOException {
    try {
        Schema.Parser parser = new Schema.Parser();
        return parser.parse(file);
    } catch (SchemaParseException e) {
        return null;
    }
}
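Note that this helper (and its String overload in Example #9) maps SchemaParseException to a null return rather than propagating it, so callers have to check for null. A hypothetical caller, assumed to live in the same package as the AvroUtils class above; the file name is illustrative.

import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;

public class ParseSchemaCallerSketch {
    public static void main(String[] args) throws IOException {
        Schema schema = AvroUtils.parseSchema(new File("user.avsc"));
        if (schema == null) {
            // parseSchema swallows SchemaParseException, so the failure must be handled here.
            throw new IllegalArgumentException("user.avsc does not contain a valid Avro schema");
        }
        System.out.println(schema.getFullName());
    }
}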
Example #8
Source File: MonsantoSchemaTest.java From avro-util with BSD 2-Clause "Simplified" License | 5 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateBadSchema() throws Exception {
    AvroVersion runtimeVersion = AvroCompatibilityHelper.getRuntimeAvroVersion();
    if (runtimeVersion != AvroVersion.AVRO_1_4) {
        throw new SkipException("only supported under " + AvroVersion.AVRO_1_4
            + ". runtime version detected as " + runtimeVersion);
    }
    String originalAvsc = TestUtil.load("BadInnerNamespace.avsc");
    Schema parsed = Schema.parse(originalAvsc);
    String toStringOutput = parsed.toString();
    Schema.parse(toStringOutput);
    Assert.fail("2nd parse expected to throw");
}
Example #9
Source File: AvroUtils.java From kareldb with Apache License 2.0 | 5 votes |
public static Schema parseSchema(String schemaString) {
    try {
        Schema.Parser parser = new Schema.Parser();
        return parser.parse(schemaString);
    } catch (SchemaParseException e) {
        return null;
    }
}
Example #10
Source File: AvroFileReaderTest.java From kafka-connect-fs with Apache License 2.0 | 5 votes |
@ParameterizedTest
@MethodSource("fileSystemConfigProvider")
public void readerWithUnparseableSchema(ReaderFsTestConfig fsConfig) throws IOException {
    Map<String, Object> readerConfig = getReaderConfig();
    readerConfig.put(AvroFileReader.FILE_READER_AVRO_SCHEMA, "invalid schema");
    FileSystem testFs = FileSystem.newInstance(fsConfig.getFsUri(), new Configuration());
    assertThrows(ConnectException.class, () -> getReader(testFs, fsConfig.getDataFile(), readerConfig));
    assertThrows(SchemaParseException.class, () -> {
        try {
            getReader(testFs, fsConfig.getDataFile(), readerConfig);
        } catch (Exception e) {
            throw e.getCause();
        }
    });
}
Example #11
Source File: InputAvroSchemaTest.java From dbeam with Apache License 2.0 | 5 votes |
@Test(expected = SchemaParseException.class)
public void checkReadAvroSchemaWithInvalidFormat() throws IOException {
    String invalidJson = "{";
    File invalidFile = createTestAvroSchemaFile(invalidJson);
    String path = invalidFile.toPath().toString();
    JdbcExportPipelineOptions options = PipelineOptionsFactory.create().as(JdbcExportPipelineOptions.class);
    options.setAvroSchemaFilePath(path);
    Assert.assertEquals(path, options.getAvroSchemaFilePath());
    BeamJdbcAvroSchema.parseOptionalInputAvroSchemaFile(path);
}
Example #12
Source File: JsonSchemaCompatibilityCheck.java From pulsar with Apache License 2.0 | 5 votes |
private boolean isAvroSchema(SchemaData schemaData) {
    try {
        Schema.Parser fromParser = new Schema.Parser();
        fromParser.setValidateDefaults(false);
        Schema fromSchema = fromParser.parse(new String(schemaData.getData(), UTF_8));
        return true;
    } catch (SchemaParseException e) {
        return false;
    }
}
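The same probe-parse idea can be written as a standalone helper. This is a sketch of the pattern, not Pulsar's API; it keeps setValidateDefaults(false) from the example above so that a schema with questionable default values is still recognized as Avro, and only genuine parse failures return false.

import static java.nio.charset.StandardCharsets.UTF_8;

import org.apache.avro.Schema;
import org.apache.avro.SchemaParseException;

public final class AvroSchemaProbe {

    // Returns true if the payload parses as an Avro schema, false otherwise.
    public static boolean looksLikeAvroSchema(byte[] schemaBytes) {
        try {
            Schema.Parser parser = new Schema.Parser();
            parser.setValidateDefaults(false);  // ignore default-value issues; only syntax matters here
            parser.parse(new String(schemaBytes, UTF_8));
            return true;
        } catch (SchemaParseException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(looksLikeAvroSchema("{\"type\":\"string\"}".getBytes(UTF_8)));                   // true
        System.out.println(looksLikeAvroSchema("{\"title\":\"a JSON Schema, not Avro\"}".getBytes(UTF_8))); // false
    }
}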
Example #13
Source File: PulsarMetadata.java From pulsar with Apache License 2.0 | 5 votes |
static List<ColumnMetadata> getPulsarColumnsFromStructSchema(TopicName topicName,
                                                             SchemaInfo schemaInfo,
                                                             boolean withInternalColumns,
                                                             PulsarColumnHandle.HandleKeyValueType handleKeyValueType) {
    String schemaJson = new String(schemaInfo.getSchema());
    if (StringUtils.isBlank(schemaJson)) {
        throw new PrestoException(NOT_SUPPORTED, "Topic " + topicName.toString() + " does not have a valid schema");
    }
    Schema schema;
    try {
        schema = PulsarConnectorUtils.parseSchema(schemaJson);
    } catch (SchemaParseException ex) {
        throw new PrestoException(NOT_SUPPORTED, "Topic " + topicName.toString() + " does not have a valid schema");
    }
    ImmutableList.Builder<ColumnMetadata> builder = ImmutableList.builder();
    builder.addAll(getColumns(null, schema, new HashSet<>(), new Stack<>(), new Stack<>(), handleKeyValueType));
    if (withInternalColumns) {
        PulsarInternalColumn.getInternalFields()
            .stream()
            .forEach(pulsarInternalColumn -> builder.add(pulsarInternalColumn.getColumnMetadata(false)));
    }
    return builder.build();
}
Example #14
Source File: AvroSchemaConverter.java From flink with Apache License 2.0 | 5 votes |
/**
 * Converts an Avro schema string into a nested row structure with deterministic field order and data
 * types that are compatible with Flink's Table & SQL API.
 *
 * @param avroSchemaString Avro schema definition string
 * @return type information matching the schema
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convertToTypeInfo(String avroSchemaString) {
    Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
    final Schema schema;
    try {
        schema = new Schema.Parser().parse(avroSchemaString);
    } catch (SchemaParseException e) {
        throw new IllegalArgumentException("Could not parse Avro schema string.", e);
    }
    return (TypeInformation<T>) convertToTypeInfo(schema);
}
Example #15
Source File: AvroRowSerializationSchema.java From flink with Apache License 2.0 | 5 votes |
/**
 * Creates an Avro serialization schema for the given Avro schema string.
 *
 * @param avroSchemaString Avro schema string used to serialize Flink's row to Avro's record
 */
public AvroRowSerializationSchema(String avroSchemaString) {
    Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
    this.recordClazz = null;
    this.schemaString = avroSchemaString;
    try {
        this.schema = new Schema.Parser().parse(avroSchemaString);
    } catch (SchemaParseException e) {
        throw new IllegalArgumentException("Could not parse Avro schema string.", e);
    }
    this.datumWriter = new GenericDatumWriter<>(schema);
    this.arrayOutputStream = new ByteArrayOutputStream();
    this.encoder = EncoderFactory.get().binaryEncoder(arrayOutputStream, null);
}
Example #16
Source File: AvroRowSerializationSchema.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Creates an Avro serialization schema for the given Avro schema string.
 *
 * @param avroSchemaString Avro schema string used to serialize Flink's row to Avro's record
 */
public AvroRowSerializationSchema(String avroSchemaString) {
    Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
    this.recordClazz = null;
    this.schemaString = avroSchemaString;
    try {
        this.schema = new Schema.Parser().parse(avroSchemaString);
    } catch (SchemaParseException e) {
        throw new IllegalArgumentException("Could not parse Avro schema string.", e);
    }
    this.datumWriter = new GenericDatumWriter<>(schema);
    this.arrayOutputStream = new ByteArrayOutputStream();
    this.encoder = EncoderFactory.get().binaryEncoder(arrayOutputStream, null);
}
Example #17
Source File: Avro15ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro15ValidatesEnumValues() throws Exception {
    String avsc = TestUtil.load("EnumWithInvalidValue.avsc");
    nativeParse(avsc, null);
}
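This and the remaining avro-util tests load .avsc fixtures and call project-specific helpers (TestUtil.load, nativeParse), so they are not runnable on their own. The same behavior can be reproduced with plain Avro; below is a minimal sketch using an inline enum whose second symbol contains an illegal character. Recent Avro versions apply the usual name rules to enum symbols; the per-version tests here exist precisely because older releases were more lenient.

import org.apache.avro.Schema;
import org.apache.avro.SchemaParseException;

public class EnumSymbolValidationSketch {
    public static void main(String[] args) {
        // "not-ok" contains '-', which is not allowed in an Avro name.
        String avsc = "{\"type\":\"enum\",\"name\":\"Status\",\"symbols\":[\"OK\",\"not-ok\"]}";
        try {
            new Schema.Parser().parse(avsc);
            System.out.println("accepted (behavior of older Avro releases)");
        } catch (SchemaParseException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}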
Example #18
Source File: Avro16ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro16ValidatesRecordNames() throws Exception {
    String avsc = TestUtil.load("RecordWithInvalidName.avsc");
    nativeParse(avsc, null);
}
Example #19
Source File: Avro16ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro16ValidatesFieldNames() throws Exception {
    String avsc = TestUtil.load("RecordWithInvalidFieldName.avsc");
    nativeParse(avsc, null);
}
Example #20
Source File: Avro16ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro16ValidatesFixedNames() throws Exception {
    String avsc = TestUtil.load("FixedWithInvalidName.avsc");
    nativeParse(avsc, null);
}
Example #21
Source File: Avro18ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro18ValidatesEnumValues() throws Exception {
    String avsc = TestUtil.load("EnumWithInvalidValue.avsc");
    nativeParse(avsc, null, null);
}
Example #22
Source File: Avro16ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro16ValidatesEnumNames() throws Exception {
    String avsc = TestUtil.load("EnumWithInvalidName.avsc");
    nativeParse(avsc, null);
}
Example #23
Source File: Avro16ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro16ValidatesEnumValues() throws Exception {
    String avsc = TestUtil.load("EnumWithInvalidValue.avsc");
    nativeParse(avsc, null);
}
Example #24
Source File: Avro17ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro17ValidatesRecordNames() throws Exception {
    String avsc = TestUtil.load("RecordWithInvalidName.avsc");
    nativeParse(avsc, null, null);
}
Example #25
Source File: Avro17ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro17ValidatesFieldNames() throws Exception {
    String avsc = TestUtil.load("RecordWithInvalidFieldName.avsc");
    nativeParse(avsc, null, null);
}
Example #26
Source File: Avro17ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro17ValidatesFixedNames() throws Exception {
    String avsc = TestUtil.load("FixedWithInvalidName.avsc");
    nativeParse(avsc, null, null);
}
Example #27
Source File: Avro17ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro17ValidatesEnumNames() throws Exception {
    String avsc = TestUtil.load("EnumWithInvalidName.avsc");
    nativeParse(avsc, null, null);
}
Example #28
Source File: Avro17ParseBehaviorTest.java From avro-util with BSD 2-Clause "Simplified" License | 4 votes |
@Test(expectedExceptions = SchemaParseException.class)
public void demonstrateAvro17ValidatesEnumValues() throws Exception {
    String avsc = TestUtil.load("EnumWithInvalidValue.avsc");
    nativeParse(avsc, null, null);
}