org.apache.kafka.connect.data.SchemaBuilder Java Examples
The following examples show how to use
org.apache.kafka.connect.data.SchemaBuilder.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: QueryEngine.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
/**
 * Builds a placeholder {@link StructuredDataSource} describing the projection of the
 * given SELECT. Every selected column is registered under its alias with the BOOLEAN
 * schema — apparently a stand-in type resolved later in planning (TODO confirm).
 */
StructuredDataSource getResultDatasource(final Select select, final String name) {
  SchemaBuilder resultSchema = SchemaBuilder.struct().name(name);
  for (final SelectItem item : select.getSelectItems()) {
    if (!(item instanceof SingleColumn)) {
      continue;
    }
    final String alias = ((SingleColumn) item).getAlias().get();
    resultSchema = resultSchema.field(alias, Schema.BOOLEAN_SCHEMA);
  }
  final KsqlTopic ksqlTopic = new KsqlTopic(name, name, null);
  return new KsqlStream(
      "QueryEngine-DDLCommand-Not-Needed",
      name,
      resultSchema.schema(),
      null,
      null,
      ksqlTopic
  );
}
Example #2
Source File: CloudPubSubSinkTaskTest.java From pubsub with Apache License 2.0 | 6 votes |
@Test(expected = DataException.class)
public void testStructSchemaWithNestedSchema() {
  task.start(props);
  // A struct nested inside a struct must trigger a DataException on put().
  Schema innerSchema = SchemaBuilder.struct().build();
  Struct innerValue = new Struct(innerSchema);
  Schema outerSchema = SchemaBuilder.struct()
      .field(FIELD_STRING1, SchemaBuilder.string())
      .field(FIELD_STRING2, innerSchema)
      .build();
  Struct outerValue = new Struct(outerSchema);
  outerValue.put(FIELD_STRING1, "tide");
  outerValue.put(FIELD_STRING2, innerValue);
  List<SinkRecord> records = new ArrayList<>();
  records.add(new SinkRecord(null, -1, null, null, outerSchema, outerValue, -1));
  task.put(records);
}
Example #3
Source File: HeaderToField.java From kafka-connect-transform-common with Apache License 2.0 | 6 votes |
/**
 * Returns the cached {@code Conversion} for the given schema, building and caching it
 * on first use. The rebuilt schema is the input schema plus one extra field per
 * configured header-to-field mapping, with a handler created for each mapping.
 */
Conversion conversion(Schema schema) {
  return this.schemaCache.computeIfAbsent(schema, missing -> {
    log.info("conversion() - Building new schema for {}", schema);
    SchemaBuilder newSchemaBuilder = SchemaBuilders.of(schema);
    List<ConversionHandler> conversionHandlers =
        new ArrayList<>(this.config.mappings.size());
    for (HeaderToFieldConfig.HeaderToFieldMapping mapping : this.config.mappings) {
      log.trace("conversion() - adding field '{}' with schema {}", mapping.field, mapping.schema);
      newSchemaBuilder.field(mapping.field, mapping.schema);
      conversionHandlers.add(
          ConversionHandler.of(mapping.schema, mapping.header, mapping.field));
    }
    return Conversion.of(newSchemaBuilder.build(), conversionHandlers);
  });
}
Example #4
Source File: MongoSinkTaskTest.java From mongo-kafka with Apache License 2.0 | 6 votes |
/**
 * Sample Debezium-style RDBMS envelope schema used by the tests: an "op" marker,
 * optional "before"/"after" row images, and a "source" descriptor.
 */
private static Schema getRdbmsValueSchemaSample() {
  return SchemaBuilder.struct()
      .name("dbserver1.catalogA.tableB.Envelope")
      .field("op", Schema.STRING_SCHEMA)
      // "before" and "after" share the same row image; built once per field to
      // preserve the original's two independent builder instances.
      .field("before", rowImageSchemaSample())
      .field("after", rowImageSchemaSample())
      .field("source", Schema.STRING_SCHEMA);
}

/** Row image shared by the "before" and "after" envelope fields (was duplicated inline). */
private static SchemaBuilder rowImageSchemaSample() {
  return SchemaBuilder.struct()
      .name("dbserver1.catalogA.tableB.Value")
      .optional()
      .field("id", Schema.INT32_SCHEMA)
      .field("first_name", Schema.STRING_SCHEMA)
      .field("last_name", Schema.STRING_SCHEMA)
      .field("email", Schema.STRING_SCHEMA);
}
Example #5
Source File: CellData.java From debezium-incubator with Apache License 2.0 | 6 votes |
/**
 * Builds the Connect schema for a single cell of the given column: the converted
 * value, a deletion timestamp, and a "set" marker. Returns {@code null} when the
 * column's Cassandra type yields no schema builder.
 */
static Schema cellSchema(ColumnMetadata cm, boolean optional) {
  AbstractType<?> convertedType = CassandraTypeConverter.convert(cm.getType());
  SchemaBuilder valueSchemaBuilder = CassandraTypeDeserializer.getSchemaBuilder(convertedType);
  // Guard the builder itself: the original checked the *built* schema for null,
  // but optional().build() had already been invoked on the builder, so an
  // unmappable type would have thrown an NPE before ever reaching the check.
  if (valueSchemaBuilder == null) {
    return null;
  }
  Schema valueSchema = valueSchemaBuilder.optional().build();
  SchemaBuilder schemaBuilder = SchemaBuilder.struct().name(cm.getName())
      .field(CELL_VALUE_KEY, valueSchema)
      .field(CELL_DELETION_TS_KEY, Schema.OPTIONAL_INT64_SCHEMA)
      .field(CELL_SET_KEY, Schema.BOOLEAN_SCHEMA);
  if (optional) {
    schemaBuilder.optional();
  }
  return schemaBuilder.build();
}
Example #6
Source File: SchemaKStream.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
/**
 * Compiles every select expression and derives the projection schema.
 * Each pair's left side is the field alias; the right side is compiled and its
 * result type becomes the field's schema. Any failure is wrapped in a KsqlException.
 */
Pair<Schema, SelectValueMapper> createSelectValueMapperAndSchema(
    final List<Pair<String, Expression>> expressionPairList
) {
  try {
    // Code generator bound to the current source schema and function registry.
    final CodeGenRunner codeGenRunner = new CodeGenRunner(schema, functionRegistry);
    final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
    final List<ExpressionMetadata> expressionEvaluators = new ArrayList<>();
    for (Pair<String, Expression> expressionPair : expressionPairList) {
      // Compile the expression; the evaluator also carries the inferred type.
      final ExpressionMetadata expressionEvaluator =
          codeGenRunner.buildCodeGenFromParseTree(expressionPair.getRight());
      schemaBuilder.field(expressionPair.getLeft(), expressionEvaluator.getExpressionType());
      expressionEvaluators.add(expressionEvaluator);
    }
    return new Pair<>(schemaBuilder.build(), new SelectValueMapper(
        genericRowValueTypeEnforcer,
        expressionPairList,
        expressionEvaluators
    ));
  } catch (Exception e) {
    throw new KsqlException("Code generation failed for SelectValueMapper", e);
  }
}
Example #7
Source File: PlainPayloadFormatterTest.java From kafka-connect-lambda with Apache License 2.0 | 6 votes |
@Before
public void setup() {
  // Struct key/value fixtures.
  keySchema = SchemaBuilder.struct()
      .name(TEST_KEY_CLASS)
      .field("key_name", Schema.STRING_SCHEMA)
      .build();
  keyStruct = new Struct(keySchema).put("key_name", TEST_KEY);
  valueSchema = SchemaBuilder.struct()
      .name(TEST_VALUE_CLASS)
      .field(TEST_VALUE_FIELD, Schema.STRING_SCHEMA)
      .build();
  valueStruct = new Struct(valueSchema).put(TEST_VALUE_FIELD, TEST_VALUE);
  // Collection key/value fixtures.
  keyList = new ArrayList<>();
  keyList.add(TEST_KEY);
  valueList = new ArrayList<>();
  valueList.add(TEST_VALUE);
  keyMap = new HashMap<>();
  keyMap.put(TEST_KEY, TEST_VALUE);
  valueMap = new HashMap<>();
  valueMap.put(TEST_VALUE_KEY, TEST_VALUE);
  // Unit under test plus the JSON mapper used by the assertions.
  formatter = new PlainPayloadFormatter();
  mapper = new ObjectMapper();
}
Example #8
Source File: ProjectNodeTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
@Test(expected = KsqlException.class)
public void shouldThrowKsqlExcptionIfSchemaSizeDoesntMatchProjection() {
  mockSourceNode();
  EasyMock.replay(source, stream);
  // Two schema fields vs. a single projection expression — sizes must not match.
  final Schema twoFieldSchema = SchemaBuilder.struct()
      .field("field1", Schema.STRING_SCHEMA)
      .field("field2", Schema.STRING_SCHEMA)
      .build();
  final ProjectNode node = new ProjectNode(
      new PlanNodeId("1"),
      source,
      twoFieldSchema,
      Collections.singletonList(new BooleanLiteral("true")));
  node.buildStream(builder,
      ksqlConfig,
      kafkaTopicClient,
      functionRegistry,
      props,
      new MockSchemaRegistryClient());
}
Example #9
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceDoubleAndEnforceDoubleOne() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("double", SchemaBuilder.float64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // The Integer 0 must come back widened to the Double 0.0.
  assertEquals(0.0, enforcer.enforceFieldType(0, 0));
}
Example #10
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceDoubleAndEnforceDoubleTwo() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("double", SchemaBuilder.float64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // The Float 0.0F must come back widened to the Double 0.0.
  assertEquals(0.0, enforcer.enforceFieldType(0, 0.0F));
}
Example #11
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test(expected = NumberFormatException.class)
public void testEnforceDoubleThrowsNumberFormatExceptionOnInvalidString() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("double", SchemaBuilder.float64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // A non-numeric string cannot be parsed as a double.
  enforcer.enforceFieldType(0, "not a double");
}
Example #12
Source File: KsqlDelimitedSerializerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Before
public void before() {
  // Field names are spelled in upper case directly. The original called
  // String.toUpperCase() at runtime, which is locale-sensitive (e.g. under a
  // Turkish default locale "orderid" becomes "ORDERİD") and needlessly dynamic.
  orderSchema = SchemaBuilder.struct()
      .field("ORDERTIME", org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
      .field("ORDERID", org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
      .field("ITEMID", org.apache.kafka.connect.data.Schema.STRING_SCHEMA)
      .field("ORDERUNITS", org.apache.kafka.connect.data.Schema.FLOAT64_SCHEMA)
      .build();
}
Example #13
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceLongReturningNull() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("long", SchemaBuilder.int64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // A null value passes through unchanged.
  assertNull(enforcer.enforceFieldType(0, null));
}
Example #14
Source File: MeasurementConverter.java From kafka-metrics with Apache License 2.0 | 5 votes |
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
  internalAvro = new InternalAvroSerde();
  // Map schemas for the tag and field dictionaries of a measurement.
  final Schema tagsSchema =
      SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA).schema();
  final Schema fieldsSchema =
      SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.FLOAT64_SCHEMA).schema();
  this.schema = SchemaBuilder.struct()
      .name("Measurement")
      .version(1)
      .field("timestamp", Schema.INT64_SCHEMA)
      .field("name", Schema.STRING_SCHEMA)
      .field("tags", tagsSchema)
      .field("fields", fieldsSchema)
      .build();
}
Example #15
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceLongReturningLongWhereByteValueIsNegative() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("long", SchemaBuilder.int64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // A negative Integer must come back widened to a Long.
  assertEquals(-2315L, enforcer.enforceFieldType(0, -2315));
}
Example #16
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceLongAndEnforceLongOne() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("long", SchemaBuilder.int64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // The Integer 0 must come back widened to the Long 0L.
  assertEquals(0L, enforcer.enforceFieldType(0, 0));
}
Example #17
Source File: AggregateNode.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
/**
 * Builds the output schema of the aggregation: first the required (pass-through)
 * columns, then one synthetic field per aggregate function invocation.
 */
private Schema buildAggregateSchema(
    final Schema schema,
    final FunctionRegistry functionRegistry,
    final InternalSchema internalSchema
) {
  final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  final List<Field> fields = schema.fields();
  // Required columns keep their original name and schema. NOTE(review): this
  // assumes they occupy the leading positions of the input schema — confirm.
  for (int i = 0; i < getRequiredColumnList().size(); i++) {
    schemaBuilder.field(fields.get(i).name(), fields.get(i).schema());
  }
  // One field per aggregate call, named with the function-variable prefix plus
  // the call's positional suffix; its schema is the UDAF's return type.
  for (int aggFunctionVarSuffix = 0;
      aggFunctionVarSuffix < getFunctionList().size();
      aggFunctionVarSuffix++) {
    String udafName = getFunctionList().get(aggFunctionVarSuffix).getName()
        .getSuffix();
    // Resolve the aggregate implementation for this call's argument expressions.
    KsqlAggregateFunction aggregateFunction = functionRegistry.getAggregateFunction(
        udafName,
        internalSchema.getInternalExpressionList(
            getFunctionList().get(aggFunctionVarSuffix).getArguments()),
        schema
    );
    schemaBuilder.field(
        AggregateExpressionRewriter.AGGREGATE_FUNCTION_VARIABLE_PREFIX
            + aggFunctionVarSuffix,
        aggregateFunction.getReturnType()
    );
  }
  return schemaBuilder.build();
}
Example #18
Source File: DataUtilityTest.java From kinesis-kafka-connector with Apache License 2.0 | 5 votes |
@Test
public void parseByteValueTest() {
  Schema schema = SchemaBuilder.bytes();
  byte[] value = "Kinesis-Kafka Connector".getBytes();
  ByteBuffer actual = DataUtility.parseValue(schema, value);
  ByteBuffer expected = ByteBuffer.wrap("Kinesis-Kafka Connector".getBytes());
  // assertEquals reports both buffers on failure, unlike assertTrue(a.equals(b))
  // which only says "expected true".
  Assert.assertEquals(expected, actual);
}
Example #19
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceDoubleReturningDoubleWhereShortValueIsNegative() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("double", SchemaBuilder.float64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // A negative Integer must come back widened to a Double.
  assertEquals(-433.0, enforcer.enforceFieldType(0, -433));
}
Example #20
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceLongOnValidString() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("long", SchemaBuilder.int64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // A numeric string is parsed into a Long.
  assertEquals(-123L, enforcer.enforceFieldType(0, "-123"));
}
Example #21
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceString() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("string", SchemaBuilder.string());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  try {
    // A Double is not accepted where a string column is expected.
    enforcer.enforceFieldType(0, 0.0);
    fail("Expecting exception: KsqlException");
  } catch (KsqlException e) {
    // The exception must originate from the enforcer itself.
    assertEquals(GenericRowValueTypeEnforcer.class.getName(),
        e.getStackTrace()[0].getClassName());
  }
}
Example #22
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceDoubleReturningDoubleWhereByteValueIsNegative() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("double", SchemaBuilder.float64());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // The Integer -1 must come back widened to the Double -1.0.
  assertEquals((-1.0), enforcer.enforceFieldType(0, -1));
}
Example #23
Source File: GenericRowValueTypeEnforcerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Test
public void testEnforceIntegerOnValidCharSequence() {
  final SchemaBuilder builder =
      SchemaBuilder.struct().field("int", SchemaBuilder.int32());
  final GenericRowValueTypeEnforcer enforcer = new GenericRowValueTypeEnforcer(builder);
  // Any numeric CharSequence (not just String) is parsed into an Integer.
  assertEquals(55, enforcer.enforceFieldType(0, new StringBuilder("55")));
}
Example #24
Source File: ExpressionTypeManagerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Before
public void init() {
  metaStore = MetaStoreFixture.getNewMetaStore();
  functionRegistry = new FunctionRegistry();
  // Test schema with four qualified columns. Note: the *_SCHEMA constants are
  // declared on the Schema interface and only inherited through SchemaBuilder.
  // The builder is assigned without build() — presumably fine because
  // SchemaBuilder implements Schema, but it stays mutable (TODO confirm).
  schema = SchemaBuilder.struct()
      .field("TEST1.COL0", SchemaBuilder.INT64_SCHEMA)
      .field("TEST1.COL1", SchemaBuilder.STRING_SCHEMA)
      .field("TEST1.COL2", SchemaBuilder.STRING_SCHEMA)
      .field("TEST1.COL3", SchemaBuilder.FLOAT64_SCHEMA);
}
Example #25
Source File: TextFileReader.java From kafka-connect-fs with Apache License 2.0 | 5 votes |
@Override
protected void configure(Map<String, String> config) {
  // Single-field schema holding each line (or whole file) of text.
  final String valueFieldName =
      config.getOrDefault(FILE_READER_TEXT_FIELD_NAME_VALUE, FIELD_NAME_VALUE_DEFAULT);
  this.schema = SchemaBuilder.struct()
      .field(valueFieldName, Schema.STRING_SCHEMA)
      .build();
  this.recordPerLine =
      Boolean.parseBoolean(config.getOrDefault(FILE_READER_TEXT_RECORD_PER_LINE, "true"));
  final String compressionName =
      config.getOrDefault(FILE_READER_TEXT_COMPRESSION_TYPE, CompressionType.NONE.toString());
  final boolean concatenated =
      Boolean.parseBoolean(config.getOrDefault(FILE_READER_TEXT_COMPRESSION_CONCATENATED, "true"));
  this.compression = CompressionType.fromName(compressionName, concatenated);
  this.charset = Charset.forName(
      config.getOrDefault(FILE_READER_TEXT_ENCODING, Charset.defaultCharset().name()));
}
Example #26
Source File: SerDeUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
/**
 * Parses an Avro schema string and converts it into a Connect struct schema,
 * mapping each Avro field through {@code getKsqlSchemaForAvroSchema}.
 */
public static Schema getSchemaFromAvro(String avroSchemaString) {
  final org.apache.avro.Schema avroSchema =
      new org.apache.avro.Schema.Parser().parse(avroSchemaString);
  final SchemaBuilder structBuilder = SchemaBuilder.struct().name(avroSchema.getName());
  for (final org.apache.avro.Schema.Field avroField : avroSchema.getFields()) {
    structBuilder.field(avroField.name(), getKsqlSchemaForAvroSchema(avroField.schema()));
  }
  return structBuilder.build();
}
Example #27
Source File: CamelTypeConverterTransform.java From camel-kafka-connector with Apache License 2.0 | 5 votes |
/**
 * Rebuilds the record schema for the converted value, carrying over the original
 * schema's basics, optionality, and (converted) default value.
 */
private Schema getOrBuildRecordSchema(final Schema originalSchema, final Object value) {
  final SchemaBuilder builder = SchemaUtil.copySchemaBasics(
      originalSchema, SchemaHelper.buildSchemaBuilderForType(value));
  if (originalSchema.isOptional()) {
    builder.optional();
  }
  final Object originalDefault = originalSchema.defaultValue();
  if (originalDefault != null) {
    // The default value must be converted the same way as the record value.
    builder.defaultValue(convertValueWithCamelTypeConverter(originalDefault));
  }
  return builder.build();
}
Example #28
Source File: AggregateNode.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
/**
 * Creates one aggregate-function instance per function call, maps it to its value
 * index in the aggregate row (starting at {@code initialUdafIndex}), registers each
 * function's initial-value supplier, and appends its return type to the schema.
 */
private Map<Integer, KsqlAggregateFunction> createAggValToFunctionMap(
    final SchemaKStream aggregateArgExpanded,
    final SchemaBuilder aggregateSchema,
    final KudafInitializer initializer,
    final int initialUdafIndex,
    final FunctionRegistry functionRegistry,
    final InternalSchema internalSchema
) {
  try {
    int udafIndexInAggSchema = initialUdafIndex;
    final Map<Integer, KsqlAggregateFunction> aggValToAggFunctionMap = new HashMap<>();
    for (FunctionCall functionCall : getFunctionList()) {
      // Resolve the registered aggregate implementation for this call.
      KsqlAggregateFunction aggregateFunctionInfo = functionRegistry
          .getAggregateFunction(functionCall.getName().toString(),
              internalSchema.getInternalExpressionList(
                  functionCall.getArguments()),
              aggregateArgExpanded.getSchema()
          );
      // Bind it to the concrete argument expressions / internal name indices.
      KsqlAggregateFunction aggregateFunction = aggregateFunctionInfo.getInstance(
          internalSchema.getInternalNameToIndexMap(),
          internalSchema.getInternalExpressionList(functionCall.getArguments())
      );
      aggValToAggFunctionMap.put(udafIndexInAggSchema++, aggregateFunction);
      initializer.addAggregateIntializer(aggregateFunction.getInitialValueSupplier());
      // NOTE(review): the field name uses the *post-incremented* index, so the
      // map key is i while the schema field is "AGG_COL_" + (i + 1). This looks
      // like an off-by-one — confirm against how the schema is consumed before
      // changing it.
      aggregateSchema.field("AGG_COL_" + udafIndexInAggSchema,
          aggregateFunction.getReturnType());
    }
    return aggValToAggFunctionMap;
  } catch (final Exception e) {
    throw new KsqlException(
        String.format(
            "Failed to create aggregate val to function map. expressionNames:%s",
            internalSchema.getInternalNameToIndexMap()
        ),
        e
    );
  }
}
Example #29
Source File: SqlToJavaVisitorTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
@Before
public void init() {
  metaStore = MetaStoreFixture.getNewMetaStore();
  functionRegistry = new FunctionRegistry();
  // Test schema with four qualified columns. Note: the *_SCHEMA constants are
  // declared on the Schema interface and only inherited through SchemaBuilder.
  // The builder is assigned without build() — presumably fine because
  // SchemaBuilder implements Schema, but it stays mutable (TODO confirm).
  schema = SchemaBuilder.struct()
      .field("TEST1.COL0", SchemaBuilder.INT64_SCHEMA)
      .field("TEST1.COL1", SchemaBuilder.STRING_SCHEMA)
      .field("TEST1.COL2", SchemaBuilder.STRING_SCHEMA)
      .field("TEST1.COL3", SchemaBuilder.FLOAT64_SCHEMA);
  // Code generator under test, bound to the schema above.
  codeGenRunner = new CodeGenRunner(schema, functionRegistry);
}
Example #30
Source File: LogicalPlanner.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
/**
 * Assembles the aggregation plan node: derives each select expression's result
 * type to form the aggregate schema, then wires in the grouping, windowing, and
 * aggregate-analysis details.
 */
private AggregateNode buildAggregateNode(
    final Schema inputSchema,
    final PlanNode sourcePlanNode
) {
  final ExpressionTypeManager typeManager = new ExpressionTypeManager(
      inputSchema,
      functionRegistry
  );
  SchemaBuilder aggregateSchema = SchemaBuilder.struct();
  for (int idx = 0; idx < analysis.getSelectExpressions().size(); idx++) {
    final Expression selectExpression = analysis.getSelectExpressions().get(idx);
    final String fieldAlias = analysis.getSelectExpressionAlias().get(idx);
    // Each field takes the alias and the expression's inferred schema.
    aggregateSchema = aggregateSchema.field(
        fieldAlias,
        typeManager.getExpressionType(selectExpression));
  }
  return new AggregateNode(
      new PlanNodeId("Aggregate"),
      sourcePlanNode,
      aggregateSchema,
      analysis.getGroupByExpressions(),
      analysis.getWindowExpression(),
      aggregateAnalysis.getAggregateFunctionArguments(),
      aggregateAnalysis.getFunctionList(),
      aggregateAnalysis.getRequiredColumnsList(),
      aggregateAnalysis.getFinalSelectExpressions(),
      aggregateAnalysis.getHavingExpression()
  );
}