Java Code Examples for org.apache.kafka.connect.data.SchemaBuilder#struct()
The following examples show how to use org.apache.kafka.connect.data.SchemaBuilder#struct(). Each example notes the project and source file it was taken from.
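Before the project examples, a minimal self-contained sketch of the API itself may help: it builds a struct schema with SchemaBuilder.struct() and populates a matching Struct value. The class name StructSchemaExample, the schema name com.example.Person, and the field names are illustrative and not taken from any of the projects below.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class StructSchemaExample {
  public static void main(String[] args) {
    // Describe a record with one required and one optional field.
    Schema personSchema = SchemaBuilder.struct()
        .name("com.example.Person")               // illustrative schema name
        .field("name", Schema.STRING_SCHEMA)
        .field("age", Schema.OPTIONAL_INT32_SCHEMA)
        .build();

    // Create a value that conforms to the schema.
    Struct person = new Struct(personSchema)
        .put("name", "Alice")
        .put("age", 30);

    person.validate(); // throws DataException if a required field is missing
    System.out.println(person);
  }
}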
Example 1
Source File: LogicalPlanner.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

private ProjectNode buildProjectNode(final Schema inputSchema, final PlanNode sourcePlanNode) {
  SchemaBuilder projectionSchema = SchemaBuilder.struct();
  ExpressionTypeManager expressionTypeManager = new ExpressionTypeManager(
      inputSchema,
      functionRegistry
  );
  for (int i = 0; i < analysis.getSelectExpressions().size(); i++) {
    Expression expression = analysis.getSelectExpressions().get(i);
    String alias = analysis.getSelectExpressionAlias().get(i);
    Schema expressionType = expressionTypeManager.getExpressionType(expression);
    projectionSchema = projectionSchema.field(alias, expressionType);
  }
  return new ProjectNode(
      new PlanNodeId("Project"),
      sourcePlanNode,
      projectionSchema,
      analysis.getSelectExpressions()
  );
}
Example 2
Source File: SchemaKStream.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

Pair<Schema, SelectValueMapper> createSelectValueMapperAndSchema(
    final List<Pair<String, Expression>> expressionPairList
) {
  try {
    final CodeGenRunner codeGenRunner = new CodeGenRunner(schema, functionRegistry);
    final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
    final List<ExpressionMetadata> expressionEvaluators = new ArrayList<>();
    for (Pair<String, Expression> expressionPair : expressionPairList) {
      final ExpressionMetadata expressionEvaluator =
          codeGenRunner.buildCodeGenFromParseTree(expressionPair.getRight());
      schemaBuilder.field(expressionPair.getLeft(), expressionEvaluator.getExpressionType());
      expressionEvaluators.add(expressionEvaluator);
    }
    return new Pair<>(schemaBuilder.build(), new SelectValueMapper(
        genericRowValueTypeEnforcer,
        expressionPairList,
        expressionEvaluators
    ));
  } catch (Exception e) {
    throw new KsqlException("Code generation failed for SelectValueMapper", e);
  }
}
Example 3
Source File: AbstractCreateStreamCommand.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

private SchemaBuilder getStreamTableSchema(List<TableElement> tableElementList) {
  SchemaBuilder tableSchema = SchemaBuilder.struct();
  for (TableElement tableElement : tableElementList) {
    if (tableElement.getName().equalsIgnoreCase(SchemaUtil.ROWTIME_NAME)
        || tableElement.getName().equalsIgnoreCase(SchemaUtil.ROWKEY_NAME)) {
      throw new KsqlException(
          SchemaUtil.ROWTIME_NAME + "/" + SchemaUtil.ROWKEY_NAME + " are "
          + "reserved token for implicit column."
          + " You cannot use them as a column name.");
    }
    tableSchema = tableSchema.field(
        tableElement.getName(),
        SchemaUtil.getTypeSchema(tableElement.getType())
    );
  }
  return tableSchema;
}
Example 4
Source File: SchemaUtilTest.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

@Test
public void shouldCreateCorrectAvroSchemaWithNullableFields() {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  schemaBuilder
      .field("ordertime", Schema.INT64_SCHEMA)
      .field("orderid", Schema.STRING_SCHEMA)
      .field("itemid", Schema.STRING_SCHEMA)
      .field("orderunits", Schema.FLOAT64_SCHEMA)
      .field("arraycol", SchemaBuilder.array(Schema.FLOAT64_SCHEMA))
      .field("mapcol", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.FLOAT64_SCHEMA));

  String avroSchemaString = SchemaUtil.buildAvroSchema(schemaBuilder.build(), "orders");

  assertThat(avroSchemaString, equalTo(
      "{\"type\":\"record\",\"name\":\"orders\",\"namespace\":\"ksql\",\"fields\":"
      + "[{\"name\":\"ordertime\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":"
      + "\"orderid\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"itemid\","
      + "\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"orderunits\",\"type\":"
      + "[\"null\",\"double\"],\"default\":null},{\"name\":\"arraycol\",\"type\":[\"null\","
      + "{\"type\":\"array\",\"items\":[\"null\",\"double\"]}],\"default\":null},{\"name\":"
      + "\"mapcol\",\"type\":[\"null\",{\"type\":\"map\",\"values\":[\"null\",\"double\"]}]"
      + ",\"default\":null}]}"));
}
Example 5
Source File: LogicalPlanner.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

private AggregateNode buildAggregateNode(
    final Schema inputSchema,
    final PlanNode sourcePlanNode
) {
  SchemaBuilder aggregateSchema = SchemaBuilder.struct();
  ExpressionTypeManager expressionTypeManager = new ExpressionTypeManager(
      inputSchema,
      functionRegistry
  );
  for (int i = 0; i < analysis.getSelectExpressions().size(); i++) {
    Expression expression = analysis.getSelectExpressions().get(i);
    String alias = analysis.getSelectExpressionAlias().get(i);
    Schema expressionType = expressionTypeManager.getExpressionType(expression);
    aggregateSchema = aggregateSchema.field(alias, expressionType);
  }
  return new AggregateNode(
      new PlanNodeId("Aggregate"),
      sourcePlanNode,
      aggregateSchema,
      analysis.getGroupByExpressions(),
      analysis.getWindowExpression(),
      aggregateAnalysis.getAggregateFunctionArguments(),
      aggregateAnalysis.getFunctionList(),
      aggregateAnalysis.getRequiredColumnsList(),
      aggregateAnalysis.getFinalSelectExpressions(),
      aggregateAnalysis.getHavingExpression()
  );
}
Example 6
Source File: AggregateNode.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

private Schema buildAggregateSchema(
    final Schema schema,
    final FunctionRegistry functionRegistry,
    final InternalSchema internalSchema
) {
  final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  final List<Field> fields = schema.fields();
  for (int i = 0; i < getRequiredColumnList().size(); i++) {
    schemaBuilder.field(fields.get(i).name(), fields.get(i).schema());
  }
  for (int aggFunctionVarSuffix = 0;
       aggFunctionVarSuffix < getFunctionList().size(); aggFunctionVarSuffix++) {
    String udafName = getFunctionList().get(aggFunctionVarSuffix).getName().getSuffix();
    KsqlAggregateFunction aggregateFunction = functionRegistry.getAggregateFunction(
        udafName,
        internalSchema.getInternalExpressionList(
            getFunctionList().get(aggFunctionVarSuffix).getArguments()),
        schema
    );
    schemaBuilder.field(
        AggregateExpressionRewriter.AGGREGATE_FUNCTION_VARIABLE_PREFIX + aggFunctionVarSuffix,
        aggregateFunction.getReturnType()
    );
  }
  return schemaBuilder.build();
}
Example 7
Source File: SchemaUtil.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

public static Schema removeImplicitRowTimeRowKeyFromSchema(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    String fieldName = field.name();
    fieldName = fieldName.substring(fieldName.indexOf('.') + 1);
    if (!fieldName.equalsIgnoreCase(SchemaUtil.ROWTIME_NAME)
        && !fieldName.equalsIgnoreCase(SchemaUtil.ROWKEY_NAME)) {
      schemaBuilder.field(fieldName, field.schema());
    }
  }
  return schemaBuilder.build();
}
Example 8
Source File: SchemaUtil.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

/**
 * Rename field names to be consistent with the internal column names.
 */
public static Schema getAvroSerdeKsqlSchema(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    schemaBuilder.field(field.name().replace(".", "_"), field.schema());
  }
  return schemaBuilder.build();
}
Example 9
Source File: SchemaUtil.java, from ksql-fork-with-deep-learning-function (Apache License 2.0)

/**
 * Remove the alias when reading/writing from outside.
 */
public static Schema getSchemaWithNoAlias(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    String name = getFieldNameWithNoAlias(field);
    schemaBuilder.field(name, field.schema());
  }
  return schemaBuilder.build();
}
Example 10
Source File: PatternRename.java, from kafka-connect-transform-common (Apache License 2.0)

@Override
protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct inputStruct) {
  final SchemaBuilder outputSchemaBuilder = SchemaBuilder.struct();
  outputSchemaBuilder.name(inputSchema.name());
  outputSchemaBuilder.doc(inputSchema.doc());
  if (null != inputSchema.defaultValue()) {
    outputSchemaBuilder.defaultValue(inputSchema.defaultValue());
  }
  if (null != inputSchema.parameters() && !inputSchema.parameters().isEmpty()) {
    outputSchemaBuilder.parameters(inputSchema.parameters());
  }
  if (inputSchema.isOptional()) {
    outputSchemaBuilder.optional();
  }

  Map<String, String> fieldMappings = new HashMap<>(inputSchema.fields().size());
  for (final Field inputField : inputSchema.fields()) {
    log.trace("process() - Processing field '{}'", inputField.name());
    final Matcher fieldMatcher = this.config.pattern.matcher(inputField.name());
    final String outputFieldName;
    if (fieldMatcher.find()) {
      outputFieldName = fieldMatcher.replaceAll(this.config.replacement);
    } else {
      outputFieldName = inputField.name();
    }
    log.trace("process() - Mapping field '{}' to '{}'", inputField.name(), outputFieldName);
    fieldMappings.put(inputField.name(), outputFieldName);
    outputSchemaBuilder.field(outputFieldName, inputField.schema());
  }

  final Schema outputSchema = outputSchemaBuilder.build();
  final Struct outputStruct = new Struct(outputSchema);
  for (Map.Entry<String, String> entry : fieldMappings.entrySet()) {
    final String inputField = entry.getKey(), outputField = entry.getValue();
    log.trace("process() - Copying '{}' to '{}'", inputField, outputField);
    final Object value = inputStruct.get(inputField);
    outputStruct.put(outputField, value);
  }
  return new SchemaAndValue(outputSchema, outputStruct);
}
Example 11
Source File: JsonFileReader.java, from kafka-connect-fs (Apache License 2.0)

private static Schema extractSchema(JsonNode jsonNode) {
  switch (jsonNode.getNodeType()) {
    case BOOLEAN:
      return Schema.OPTIONAL_BOOLEAN_SCHEMA;
    case NUMBER:
      if (jsonNode.isShort()) {
        return Schema.OPTIONAL_INT8_SCHEMA;
      } else if (jsonNode.isInt()) {
        return Schema.OPTIONAL_INT32_SCHEMA;
      } else if (jsonNode.isLong()) {
        return Schema.OPTIONAL_INT64_SCHEMA;
      } else if (jsonNode.isFloat()) {
        return Schema.OPTIONAL_FLOAT32_SCHEMA;
      } else if (jsonNode.isDouble()) {
        return Schema.OPTIONAL_FLOAT64_SCHEMA;
      } else if (jsonNode.isBigInteger()) {
        return Schema.OPTIONAL_INT64_SCHEMA;
      } else if (jsonNode.isBigDecimal()) {
        return Schema.OPTIONAL_FLOAT64_SCHEMA;
      } else {
        return Schema.OPTIONAL_FLOAT64_SCHEMA;
      }
    case STRING:
      return Schema.OPTIONAL_STRING_SCHEMA;
    case BINARY:
      return Schema.OPTIONAL_BYTES_SCHEMA;
    case ARRAY:
      Iterable<JsonNode> elements = jsonNode::elements;
      Schema arraySchema = StreamSupport.stream(elements.spliterator(), false)
          .findFirst().map(JsonFileReader::extractSchema)
          .orElse(SchemaBuilder.struct().build());
      return SchemaBuilder.array(arraySchema).build();
    case OBJECT:
      SchemaBuilder builder = SchemaBuilder.struct();
      jsonNode.fields()
          .forEachRemaining(field -> builder.field(field.getKey(), extractSchema(field.getValue())));
      return builder.build();
    default:
      return SchemaBuilder.struct().optional().build();
  }
}
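As a follow-up, the sketch below (not part of kafka-connect-fs) shows what this kind of recursive inference produces for a small JSON document. The infer method is a simplified restatement of the logic above, and the class name, sample JSON, and expected output are illustrative assumptions.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

public class JsonSchemaInferenceSketch {
  public static void main(String[] args) throws Exception {
    JsonNode node = new ObjectMapper().readTree(
        "{\"id\": 42, \"name\": \"widget\", \"tags\": [\"a\", \"b\"]}");
    Schema schema = infer(node);
    schema.fields().forEach(f -> System.out.println(f.name() + " -> " + f.schema().type()));
    // Expected output (field order follows the JSON):
    // id -> INT32, name -> STRING, tags -> ARRAY
  }

  // Simplified version of the extractSchema logic above: objects become structs,
  // arrays take the schema of their first element, scalars map to optional primitives.
  private static Schema infer(JsonNode n) {
    switch (n.getNodeType()) {
      case NUMBER:
        return n.isIntegralNumber() ? Schema.OPTIONAL_INT32_SCHEMA
                                    : Schema.OPTIONAL_FLOAT64_SCHEMA;
      case STRING:
        return Schema.OPTIONAL_STRING_SCHEMA;
      case BOOLEAN:
        return Schema.OPTIONAL_BOOLEAN_SCHEMA;
      case ARRAY:
        return n.size() > 0
            ? SchemaBuilder.array(infer(n.get(0))).build()
            : SchemaBuilder.array(SchemaBuilder.struct().build()).build();
      case OBJECT:
        SchemaBuilder b = SchemaBuilder.struct();
        n.fields().forEachRemaining(f -> b.field(f.getKey(), infer(f.getValue())));
        return b.build();
      default:
        return SchemaBuilder.struct().optional().build();
    }
  }
}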