Java Code Examples for org.apache.kafka.connect.data.Schema#INT64_SCHEMA
The following examples show how to use org.apache.kafka.connect.data.Schema#INT64_SCHEMA. The source file and originating project are listed above each example.
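Before the project examples, here is a minimal, hypothetical sketch (not taken from any of the projects below) of how Schema.INT64_SCHEMA is commonly used: it is the built-in, non-optional Kafka Connect schema for 64-bit signed integers (Java long), and it can serve either as a record key/value schema on its own or as a field type inside a struct schema. The struct and field names are illustrative.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

// Illustrative only: "Measurement", "timestampMs" and "count" are made-up names.
Schema valueSchema = SchemaBuilder.struct()
    .name("Measurement")
    .field("timestampMs", Schema.INT64_SCHEMA)      // required 64-bit field
    .field("count", Schema.OPTIONAL_INT64_SCHEMA)   // nullable variant
    .build();

Struct value = new Struct(valueSchema)
    .put("timestampMs", System.currentTimeMillis())
    .put("count", 42L);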
Example 1
Source File: TopkDistinctAggFunctionFactory.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
@Override
public KsqlAggregateFunction getProperAggregateFunction(List<Schema> argTypeList) {
  if (argTypeList.isEmpty()) {
    throw new KsqlException("TOPKDISTINCT function should have two arguments.");
  }
  Schema argSchema = argTypeList.get(0);
  switch (argSchema.type()) {
    case INT32:
      return new TopkDistinctKudaf<>(-1, 0, Schema.INT32_SCHEMA, Integer.class);
    case INT64:
      return new TopkDistinctKudaf<>(-1, 0, Schema.INT64_SCHEMA, Long.class);
    case FLOAT64:
      return new TopkDistinctKudaf<>(-1, 0, Schema.FLOAT64_SCHEMA, Double.class);
    case STRING:
      return new TopkDistinctKudaf<>(-1, 0, Schema.STRING_SCHEMA, String.class);
    default:
      throw new KsqlException("No TOPKDISTINCT aggregate function with "
          + argTypeList.get(0) + " argument type exists!");
  }
}
Example 2
Source File: ExpressionTypeManager.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
private Schema resolveArithmaticType(final Schema leftSchema, final Schema rightSchema) {
  Schema.Type leftType = leftSchema.type();
  Schema.Type rightType = rightSchema.type();
  if (leftType == rightType) {
    return leftSchema;
  } else if (((leftType == Schema.Type.STRING) || (rightType == Schema.Type.STRING))
      || ((leftType == Schema.Type.BOOLEAN) || (rightType == Schema.Type.BOOLEAN))) {
    throw new PlanException("Incompatible types.");
  } else if ((leftType == Schema.Type.FLOAT64) || (rightType == Schema.Type.FLOAT64)) {
    return Schema.FLOAT64_SCHEMA;
  } else if ((leftType == Schema.Type.INT64) || (rightType == Schema.Type.INT64)) {
    return Schema.INT64_SCHEMA;
  } else if ((leftType == Schema.Type.INT32) || (rightType == Schema.Type.INT32)) {
    return Schema.INT32_SCHEMA;
  }
  throw new PlanException("Unsupported types.");
}
Example 3
Source File: SerDeUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
private static Schema getKsqlSchemaForAvroSchema(org.apache.avro.Schema avroSchema) {
  switch (avroSchema.getType()) {
    case INT:
      return Schema.INT32_SCHEMA;
    case LONG:
      return Schema.INT64_SCHEMA;
    case DOUBLE:
    case FLOAT:
      return Schema.FLOAT64_SCHEMA;
    case BOOLEAN:
      return Schema.BOOLEAN_SCHEMA;
    case STRING:
      return Schema.STRING_SCHEMA;
    case ARRAY:
      return SchemaBuilder.array(getKsqlSchemaForAvroSchema(avroSchema.getElementType()));
    case MAP:
      return SchemaBuilder.map(Schema.STRING_SCHEMA,
          getKsqlSchemaForAvroSchema(avroSchema.getValueType()));
    case UNION:
      return handleUnion(avroSchema);
    default:
      throw new KsqlException(
          String.format("KSQL doesn't currently support Avro type: %s", avroSchema.getFullName()));
  }
}
Example 4
Source File: SchemaUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
public static Schema getTypeSchema(final String sqlType) {
  switch (sqlType) {
    case "VARCHAR":
    case "STRING":
      return Schema.STRING_SCHEMA;
    case "BOOLEAN":
    case "BOOL":
      return Schema.BOOLEAN_SCHEMA;
    case "INTEGER":
    case "INT":
      return Schema.INT32_SCHEMA;
    case "BIGINT":
    case "LONG":
      return Schema.INT64_SCHEMA;
    case "DOUBLE":
      return Schema.FLOAT64_SCHEMA;
    default:
      return getKsqlComplexType(sqlType);
  }
}
Example 5
Source File: ZeebeSourceTask.java From kafka-connect-zeebe with Apache License 2.0 | 6 votes |
private SourceRecord transformJob(final ActivatedJob job) {
  final String topic = topicExtractor.extract(job);
  final Map<String, Integer> sourcePartition =
      Collections.singletonMap("partitionId", decodePartitionId(job.getKey()));
  // a better sourceOffset would be the position but we don't have it here unfortunately
  // key is however a monotonically increasing value, so in a sense it can provide a good
  // approximation of an offset
  final Map<String, Long> sourceOffset = Collections.singletonMap("key", job.getKey());

  return new SourceRecord(
      sourcePartition,
      sourceOffset,
      topic,
      Schema.INT64_SCHEMA,
      job.getKey(),
      Schema.STRING_SCHEMA,
      job.toJson());
}
Example 6
Source File: UnivocityFileReader.java From kafka-connect-fs with Apache License 2.0 | 6 votes |
private Schema strToSchema(String dataType) {
  switch (DataType.valueOf(dataType.trim().toUpperCase())) {
    case BYTE:
      return dataTypeMappingError && !allowNulls ? Schema.INT8_SCHEMA : Schema.OPTIONAL_INT8_SCHEMA;
    case SHORT:
      return dataTypeMappingError && !allowNulls ? Schema.INT16_SCHEMA : Schema.OPTIONAL_INT16_SCHEMA;
    case INT:
      return dataTypeMappingError && !allowNulls ? Schema.INT32_SCHEMA : Schema.OPTIONAL_INT32_SCHEMA;
    case LONG:
      return dataTypeMappingError && !allowNulls ? Schema.INT64_SCHEMA : Schema.OPTIONAL_INT64_SCHEMA;
    case FLOAT:
      return dataTypeMappingError && !allowNulls ? Schema.FLOAT32_SCHEMA : Schema.OPTIONAL_FLOAT32_SCHEMA;
    case DOUBLE:
      return dataTypeMappingError && !allowNulls ? Schema.FLOAT64_SCHEMA : Schema.OPTIONAL_FLOAT64_SCHEMA;
    case BOOLEAN:
      return dataTypeMappingError && !allowNulls ? Schema.BOOLEAN_SCHEMA : Schema.OPTIONAL_BOOLEAN_SCHEMA;
    case BYTES:
      return dataTypeMappingError && !allowNulls ? Schema.BYTES_SCHEMA : Schema.OPTIONAL_BYTES_SCHEMA;
    case STRING:
    default:
      return dataTypeMappingError && !allowNulls ? Schema.STRING_SCHEMA : Schema.OPTIONAL_STRING_SCHEMA;
  }
}
Example 7
Source File: SchemaTest.java From schema-registry-transfer-smt with Apache License 2.0 | 5 votes |
@Test
public void testNonByteTypeSchemas() {
  Schema[] schemas = new Schema[]{
      // Boolean
      Schema.BOOLEAN_SCHEMA, Schema.OPTIONAL_BOOLEAN_SCHEMA,
      // Integers
      Schema.INT8_SCHEMA, Schema.INT16_SCHEMA, Schema.INT32_SCHEMA, Schema.INT64_SCHEMA,
      Schema.OPTIONAL_INT8_SCHEMA, Schema.OPTIONAL_INT16_SCHEMA,
      Schema.OPTIONAL_INT32_SCHEMA, Schema.OPTIONAL_INT64_SCHEMA,
      // Floats
      Schema.FLOAT32_SCHEMA, Schema.FLOAT64_SCHEMA,
      Schema.OPTIONAL_FLOAT32_SCHEMA, Schema.OPTIONAL_FLOAT64_SCHEMA,
      // String
      Schema.STRING_SCHEMA, Schema.OPTIONAL_STRING_SCHEMA,
      // Struct with a field of bytes
      SchemaBuilder.struct().name("record")
          .field("foo", Schema.BYTES_SCHEMA)
          .build(),
      SchemaBuilder.struct().name("record")
          .field("foo", Schema.OPTIONAL_BYTES_SCHEMA)
          .build(),
      // map<bytes, bytes>
      SchemaBuilder.map(Schema.BYTES_SCHEMA, Schema.OPTIONAL_BYTES_SCHEMA).build(),
      // array<bytes>
      SchemaBuilder.array(Schema.OPTIONAL_BYTES_SCHEMA).build()
  };

  for (Schema s : schemas) {
    assertFalse(ConnectSchemaUtil.isBytesSchema(s));
  }
}
Example 8
Source File: Int64FieldConverter.java From mongo-kafka with Apache License 2.0 | 4 votes |
public Int64FieldConverter() {
  super(Schema.INT64_SCHEMA);
}
Example 9
Source File: CountKudaf.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 4 votes |
CountKudaf(int argIndexInValue) {
  super(argIndexInValue, () -> 0L, Schema.INT64_SCHEMA,
      Collections.singletonList(Schema.FLOAT64_SCHEMA));
}
Example 10
Source File: LongSumKudaf.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 4 votes |
LongSumKudaf(int argIndexInValue) {
  super(argIndexInValue, () -> 0L, Schema.INT64_SCHEMA,
      Collections.singletonList(Schema.INT64_SCHEMA));
}
Example 11
Source File: LongMaxKudaf.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 4 votes |
LongMaxKudaf(int argIndexInValue) {
  super(argIndexInValue, () -> Long.MIN_VALUE, Schema.INT64_SCHEMA,
      Collections.singletonList(Schema.INT64_SCHEMA));
}
Example 12
Source File: LongMinKudaf.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 4 votes |
LongMinKudaf(int argIndexInValue) {
  super(argIndexInValue, () -> Long.MAX_VALUE, Schema.INT64_SCHEMA,
      Collections.singletonList(Schema.INT64_SCHEMA));
}
Example 13
Source File: SqlToJavaVisitor.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 4 votes |
protected Pair<String, Schema> visitLongLiteral(LongLiteral node, Boolean unmangleNames) {
  return new Pair<>("Long.parseLong(\"" + node.getValue() + "\")", Schema.INT64_SCHEMA);
}
Example 14
Source File: SchemaUtilTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 4 votes |
@Test
public void shouldGetTheCorrectJavaTypeForLong() {
  Schema schema = Schema.INT64_SCHEMA;
  Class javaClass = SchemaUtil.getJavaType(schema);
  assertThat(javaClass, equalTo(Long.class));
}
Example 15
Source File: OracleSourceConnectorUtils.java From kafka-connect-oracle with Apache License 2.0 | 4 votes |
protected void loadTable(String owner, String tableName, String operation) throws SQLException {
  log.info("Getting dictionary details for table : {}", tableName);
  //SchemaBuilder dataSchemaBuiler = SchemaBuilder.struct().name((config.getDbNameAlias()+DOT+owner+DOT+tableName+DOT+"Value").toLowerCase());
  SchemaBuilder dataSchemaBuiler = SchemaBuilder.struct().name("value");
  String mineTableColsSql = OracleConnectorSQL.TABLE_WITH_COLS;
  if (config.getMultitenant()) {
    mineTableColsSql = OracleConnectorSQL.TABLE_WITH_COLS_CDB;
  }
  mineTableColsSql = mineTableColsSql.replace("$TABLE_OWNER$", owner).replace("$TABLE_NAME$", tableName);
  /*if (config.getMultitenant()) {
    mineTableCols=dbConn.prepareCall(sql.getContainerDictionarySQL());
  } else {
    mineTableCols=dbConn.prepareCall(sql.getDictionarySQL());
  }
  mineTableCols.setString(ConnectorSQL.PARAMETER_OWNER, owner);
  mineTableCols.setString(ConnectorSQL.PARAMETER_TABLE_NAME, tableName);*/
  mineTableCols = dbConn.prepareCall(mineTableColsSql);
  mineTableColsResultSet = mineTableCols.executeQuery();
  if (!mineTableColsResultSet.isBeforeFirst()) {
    // TODO: consider throwing up here, or an NPE will be thrown in OracleSourceTask.poll()
    log.warn("mineTableCols has no results for {}.{}", owner, tableName);
  }
  while (mineTableColsResultSet.next()) {
    String columnName = mineTableColsResultSet.getString(COLUMN_NAME_FIELD);
    Boolean nullable = mineTableColsResultSet.getString(NULLABLE_FIELD).equals("Y") ? true : false;
    String dataType = mineTableColsResultSet.getString(DATA_TYPE_FIELD);
    if (dataType.contains(TIMESTAMP_TYPE)) dataType = TIMESTAMP_TYPE;
    int dataLength = mineTableColsResultSet.getInt(DATA_LENGTH_FIELD);
    int dataScale = mineTableColsResultSet.getInt(DATA_SCALE_FIELD);
    int dataPrecision = mineTableColsResultSet.getInt(DATA_PRECISION_FIELD);
    Boolean pkColumn = mineTableColsResultSet.getInt(PK_COLUMN_FIELD) == 1 ? true : false;
    Boolean uqColumn = mineTableColsResultSet.getInt(UQ_COLUMN_FIELD) == 1 ? true : false;
    Schema columnSchema = null;
    switch (dataType) {
      case NUMBER_TYPE: {
        if (dataScale > 0 || dataPrecision == 0) {
          columnSchema = nullable ? Schema.OPTIONAL_FLOAT64_SCHEMA : Schema.FLOAT64_SCHEMA;
        } else {
          switch (dataPrecision) {
            case 1:
            case 2:
              columnSchema = nullable ? Schema.OPTIONAL_INT8_SCHEMA : Schema.INT8_SCHEMA;
              break;
            case 3:
            case 4:
              columnSchema = nullable ? Schema.OPTIONAL_INT16_SCHEMA : Schema.INT16_SCHEMA;
              break;
            case 5:
            case 6:
            case 7:
            case 8:
            case 9:
              columnSchema = nullable ? Schema.OPTIONAL_INT32_SCHEMA : Schema.INT32_SCHEMA;
              break;
            default:
              columnSchema = nullable ? Schema.OPTIONAL_INT64_SCHEMA : Schema.INT64_SCHEMA;
              break;
          }
        }
        break;
      }
      case "CHAR":
      case "VARCHAR":
      case "VARCHAR2":
      case "NCHAR":
      case "NVARCHAR":
      case "NVARCHAR2":
      case "LONG":
      case "CLOB": {
        columnSchema = nullable ? Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA;
        break;
      }
      case DATE_TYPE:
      case TIMESTAMP_TYPE: {
        columnSchema = nullable ? OPTIONAL_TIMESTAMP_SCHEMA : TIMESTAMP_SCHEMA;
        break;
      }
      default:
        columnSchema = nullable ? Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA;
        break;
    }
    dataSchemaBuiler.field(columnName, columnSchema);
    com.ecer.kafka.connect.oracle.models.Column column =
        new com.ecer.kafka.connect.oracle.models.Column(owner, tableName, columnName, nullable,
            dataType, dataLength, dataScale, pkColumn, uqColumn, columnSchema);
    String keyTabCols = owner + DOT + tableName + DOT + columnName;
    tabColsMap.put(keyTabCols, column);
    log.debug("tabColsMap entry added: {} = {}", keyTabCols, column.toString());
  }
  Schema tSchema = dataSchemaBuiler.optional().build();
  tableSchema.put(owner + DOT + tableName, tSchema);
  mineTableColsResultSet.close();
  mineTableCols.close();
}
Example 16
Source File: Int64FieldConverter.java From kafka-connect-mongodb with Apache License 2.0 | 4 votes |
public Int64FieldConverter() {
  super(Schema.INT64_SCHEMA);
}