org.apache.spark.sql.types.DecimalType Java Examples
The following examples show how to use org.apache.spark.sql.types.DecimalType.
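Before the project examples, here is a minimal, self-contained sketch of the most common DecimalType operations: constructing the type with an explicit precision and scale, placing it in a schema, and reading the precision and scale back from a field. The class name, field names, and the decimal(12,2) choice are illustrative only and not taken from any of the projects below.

import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

public class DecimalTypeSketch {
  public static void main(String[] args) {
    // decimal(12,2): up to 12 total digits, 2 of them after the decimal point.
    DecimalType amountType = DataTypes.createDecimalType(12, 2);

    // DecimalType is used like any other DataType when building a schema.
    StructType schema = new StructType(new StructField[] {
        DataTypes.createStructField("id", DataTypes.LongType, false),
        DataTypes.createStructField("amount", amountType, true)
    });

    // Precision and scale can be read back from the field's DataType.
    DataType fieldType = schema.fields()[1].dataType();
    if (fieldType instanceof DecimalType) {
      DecimalType dt = (DecimalType) fieldType;
      System.out.println("precision=" + dt.precision() + ", scale=" + dt.scale());
    }
  }
}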
Example #1
Source File: ColumnarBatchRow.java From spliceengine with GNU Affero General Public License v3.0 | 6 votes |
@Override
public BigDecimal getDecimal(int i) {
  if (isNullAt(i)) {
    return null;
  }
  DataType dt = structType.fields()[i].dataType();
  int precision = ((DecimalType) dt).precision();
  int scale = ((DecimalType) dt).scale();
  if (DecimalType.isByteArrayDecimalType(dt)) {
    // High-precision decimals are stored as the unscaled value's big-endian bytes.
    byte[] bytes = row.getBinary(i);
    BigInteger bigInteger = new BigInteger(bytes);
    BigDecimal javaDecimal = new BigDecimal(bigInteger, scale);
    return Decimal.apply(javaDecimal, precision, scale).toJavaBigDecimal();
  } else {
    // Lower-precision decimals are stored as an unscaled int or long value.
    return Decimal.apply(DecimalType.is32BitDecimalType(dt) ? getInt(i) : getLong(i), precision, scale)
        .toJavaBigDecimal();
  }
}
Example #2
Source File: TestConfigurationDataTypes.java From envelope with Apache License 2.0 | 6 votes |
@Test
public void testGetSparkDataTypeValid() {
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.DECIMAL), new DecimalType());
  assertEquals(ConfigurationDataTypes.getSparkDataType("decimal(38,38)"), new DecimalType(38, 38));
  assertEquals(ConfigurationDataTypes.getSparkDataType("decimal ( 38 , 38 ) "), new DecimalType(38, 38));
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.STRING), DataTypes.StringType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.FLOAT), DataTypes.FloatType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.DOUBLE), DataTypes.DoubleType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.BYTE), DataTypes.ByteType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.SHORT), DataTypes.ShortType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.INT), DataTypes.IntegerType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.LONG), DataTypes.LongType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.BOOLEAN), DataTypes.BooleanType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.BINARY), DataTypes.BinaryType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.DATE), DataTypes.DateType);
  assertEquals(ConfigurationDataTypes.getSparkDataType(ConfigurationDataTypes.TIMESTAMP), DataTypes.TimestampType);
}
Example #3
Source File: TestConfigurationDataTypes.java From envelope with Apache License 2.0 | 6 votes |
@Test
public void testGetConfigurationDataTypeValid() {
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(new DecimalType()), "decimal(10,0)");
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(new DecimalType(38, 38)), "decimal(38,38)");
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.StringType), ConfigurationDataTypes.STRING);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.FloatType), ConfigurationDataTypes.FLOAT);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.DoubleType), ConfigurationDataTypes.DOUBLE);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.ByteType), ConfigurationDataTypes.BYTE);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.ShortType), ConfigurationDataTypes.SHORT);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.IntegerType), ConfigurationDataTypes.INT);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.LongType), ConfigurationDataTypes.LONG);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.BooleanType), ConfigurationDataTypes.BOOLEAN);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.BinaryType), ConfigurationDataTypes.BINARY);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.DateType), ConfigurationDataTypes.DATE);
  assertEquals(ConfigurationDataTypes.getConfigurationDataType(DataTypes.TimestampType), ConfigurationDataTypes.TIMESTAMP);
}
Example #4
Source File: ConfigurationDataTypes.java From envelope with Apache License 2.0 | 6 votes |
public static String getConfigurationDataType(DataType type) {
  Set<String> supportedTypes = Sets.newHashSet(
      STRING, FLOAT, DOUBLE, BYTE, SHORT, INT, LONG, BOOLEAN, BINARY, DATE, TIMESTAMP);

  if (supportedTypes.contains(type.typeName()) || (type instanceof DecimalType)) {
    return type.typeName();
  } else if (type.typeName().equals("integer")) {
    return INT;
  } else {
    throw new RuntimeException("Unsupported field type: " + type);
  }
}
Example #5
Source File: KVPTranslator.java From envelope with Apache License 2.0 | 6 votes |
@Override
public Validations getValidations() {
  return Validations.builder()
      .mandatoryPath(KVP_DELIMITER_CONFIG_NAME, ConfigValueType.STRING)
      .mandatoryPath(FIELD_DELIMITER_CONFIG_NAME, ConfigValueType.STRING)
      .mandatoryPath(SCHEMA_CONFIG, ConfigValueType.OBJECT)
      .add(new SupportedFieldTypesValidation(SCHEMA_CONFIG, new HashSet<DataType>(Arrays.asList(
          new DecimalType(), DataTypes.StringType, DataTypes.FloatType, DataTypes.DoubleType,
          DataTypes.ShortType, DataTypes.IntegerType, DataTypes.LongType, DataTypes.BooleanType,
          DataTypes.BinaryType, DataTypes.DateType, DataTypes.TimestampType))))
      .optionalPath(TIMESTAMP_FORMAT_CONFIG_NAME, ConfigValueType.LIST)
      .handlesOwnValidationPath(SCHEMA_CONFIG)
      .build();
}
Example #6
Source File: DelimitedTranslator.java From envelope with Apache License 2.0 | 6 votes |
@Override
public Validations getValidations() {
  return Validations.builder()
      .mandatoryPath(DELIMITER_CONFIG_NAME, ConfigValueType.STRING)
      .mandatoryPath(SCHEMA_CONFIG, ConfigValueType.OBJECT)
      .add(new SupportedFieldTypesValidation(SCHEMA_CONFIG, new HashSet<DataType>(Arrays.asList(
          new DecimalType(), DataTypes.StringType, DataTypes.FloatType, DataTypes.DoubleType,
          DataTypes.ShortType, DataTypes.IntegerType, DataTypes.LongType, DataTypes.BooleanType,
          DataTypes.BinaryType, DataTypes.DateType, DataTypes.TimestampType))))
      .optionalPath(DELIMITER_REGEX_CONFIG_NAME, ConfigValueType.BOOLEAN)
      .optionalPath(TIMESTAMP_FORMAT_CONFIG_NAME, ConfigValueType.LIST)
      .handlesOwnValidationPath(SCHEMA_CONFIG)
      .build();
}
Example #7
Source File: TestSupportedFieldTypesValidation.java From envelope with Apache License 2.0 | 5 votes |
@Test
public void testValid() {
  SupportedFieldTypesValidation v = new SupportedFieldTypesValidation(SCHEMA_CONFIG,
      new HashSet<DataType>(Arrays.asList(DataTypes.IntegerType, DataTypes.StringType,
          new DecimalType(), DataTypes.TimestampType, DataTypes.BinaryType)));
  ValidationResult vr = v.validate(config);
  assertEquals(vr.getValidity(), Validity.VALID);
}
Example #8
Source File: SQLDecimal.java From spliceengine with GNU Affero General Public License v3.0 | 5 votes |
@Override
public StructField getStructField(String columnName) {
  if (precision == -1 || scale == -1) {
    return DataTypes.createStructField(columnName, DecimalType.SYSTEM_DEFAULT(), true);
  } else {
    return DataTypes.createStructField(columnName, DataTypes.createDecimalType(precision, scale), true);
  }
}
Example #9
Source File: TestSupportedFieldTypesValidation.java From envelope with Apache License 2.0 | 5 votes |
@Test
public void testInvalid() {
  SupportedFieldTypesValidation v = new SupportedFieldTypesValidation(SCHEMA_CONFIG,
      new HashSet<DataType>(Arrays.asList(DataTypes.IntegerType, new DecimalType(),
          DataTypes.TimestampType, DataTypes.BinaryType)));
  ValidationResult vr = v.validate(config);
  assertEquals(vr.getValidity(), Validity.INVALID);
}
Example #10
Source File: StructInternalRow.java From iceberg with Apache License 2.0 | 5 votes |
@Override
@SuppressWarnings("checkstyle:CyclomaticComplexity")
public Object get(int ordinal, DataType dataType) {
  if (dataType instanceof IntegerType) {
    return getInt(ordinal);
  } else if (dataType instanceof LongType) {
    return getLong(ordinal);
  } else if (dataType instanceof StringType) {
    return getUTF8String(ordinal);
  } else if (dataType instanceof FloatType) {
    return getFloat(ordinal);
  } else if (dataType instanceof DoubleType) {
    return getDouble(ordinal);
  } else if (dataType instanceof DecimalType) {
    DecimalType decimalType = (DecimalType) dataType;
    return getDecimal(ordinal, decimalType.precision(), decimalType.scale());
  } else if (dataType instanceof BinaryType) {
    return getBinary(ordinal);
  } else if (dataType instanceof StructType) {
    return getStruct(ordinal, ((StructType) dataType).size());
  } else if (dataType instanceof ArrayType) {
    return getArray(ordinal);
  } else if (dataType instanceof MapType) {
    return getMap(ordinal);
  } else if (dataType instanceof BooleanType) {
    return getBoolean(ordinal);
  } else if (dataType instanceof ByteType) {
    return getByte(ordinal);
  } else if (dataType instanceof ShortType) {
    return getShort(ordinal);
  }
  return null;
}
Example #11
Source File: SupportedFieldTypesValidation.java From envelope with Apache License 2.0 | 5 votes |
@Override
public ValidationResult validate(Config config) {
  for (StructField field : ComponentFactory.create(
      Schema.class, config.getConfig(this.path), true).getSchema().fields()) {
    // Any DecimalType field is accepted when a DecimalType was listed, regardless of its
    // precision and scale, because the constructor normalizes decimal entries (see Example #12).
    boolean decimalMatch = (field.dataType() instanceof DecimalType
        && validationTypes.contains(new DecimalType()));
    if (!validationTypes.contains(field.dataType()) && !decimalMatch) {
      return new ValidationResult(this, Validity.INVALID, "Schema field type "
          + field.dataType().simpleString() + " is not supported by this component type.");
    }
  }
  return new ValidationResult(this, Validity.VALID,
      "Schema field types are valid for this component type.");
}
Example #12
Source File: SupportedFieldTypesValidation.java From envelope with Apache License 2.0 | 5 votes |
public SupportedFieldTypesValidation(String path, Set<DataType> validTypes) {
  this.path = path;
  for (DataType type : validTypes) {
    if (type instanceof DecimalType) {
      // Normalize any decimal entry to the default DecimalType so that matching
      // in validate() ignores precision and scale.
      this.validationTypes.add(new DecimalType());
    } else {
      this.validationTypes.add(type);
    }
  }
}
Example #13
Source File: Reader.java From iceberg with Apache License 2.0 | 5 votes |
/**
 * Converts the objects into instances used by Spark's InternalRow.
 *
 * @param value a data value
 * @param type the Spark data type
 * @return the value converted to the representation expected by Spark's InternalRow
 */
private static Object convert(Object value, DataType type) {
  if (type instanceof StringType) {
    return UTF8String.fromString(value.toString());
  } else if (type instanceof BinaryType) {
    // Copy the remaining bytes out of the buffer; InternalRow expects a byte[] for binary columns.
    ByteBuffer buffer = (ByteBuffer) value;
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);
    return bytes;
  } else if (type instanceof DecimalType) {
    return Decimal.fromDecimal(value);
  }
  return value;
}
Example #14
Source File: TestFlatSchema.java From envelope with Apache License 2.0 | 4 votes |
@Test
public void validSchema() {
  Map<String, Object> paramMap = new HashMap<>();
  paramMap.put(ComponentFactory.TYPE_CONFIG_NAME, "flat");
  paramMap.put(FlatSchema.FIELD_NAMES_CONFIG, Lists.newArrayList(
      "decimalField", "stringField", "floatField", "doubleField", "byteField", "shortField",
      "integerField", "longField", "booleanField", "binaryField", "dateField", "timestampField",
      "decimalField2"));
  paramMap.put(FlatSchema.FIELD_TYPES_CONFIG, Lists.newArrayList(
      "decimal(10,0)", "string", "float", "double", "byte", "short", "int", "long",
      "boolean", "binary", "date", "timestamp", " decimal ( 38 , 38 )"));
  config = ConfigFactory.parseMap(paramMap);

  FlatSchema flatSchema = new FlatSchema();
  assertNoValidationFailures(flatSchema, config);
  flatSchema.configure(config);

  StructType schema = flatSchema.getSchema();
  assertEquals(schema.fields().length, 13);
  assertEquals(schema.fields()[0].name(), "decimalField");
  assertEquals(schema.fields()[1].name(), "stringField");
  assertEquals(schema.fields()[2].name(), "floatField");
  assertEquals(schema.fields()[3].name(), "doubleField");
  assertEquals(schema.fields()[4].name(), "byteField");
  assertEquals(schema.fields()[5].name(), "shortField");
  assertEquals(schema.fields()[6].name(), "integerField");
  assertEquals(schema.fields()[7].name(), "longField");
  assertEquals(schema.fields()[8].name(), "booleanField");
  assertEquals(schema.fields()[9].name(), "binaryField");
  assertEquals(schema.fields()[10].name(), "dateField");
  assertEquals(schema.fields()[11].name(), "timestampField");
  assertEquals(schema.fields()[12].name(), "decimalField2");
  assertEquals(schema.fields()[0].dataType(), new DecimalType(10, 0));
  assertEquals(schema.fields()[1].dataType(), DataTypes.StringType);
  assertEquals(schema.fields()[2].dataType(), DataTypes.FloatType);
  assertEquals(schema.fields()[3].dataType(), DataTypes.DoubleType);
  assertEquals(schema.fields()[4].dataType(), DataTypes.ByteType);
  assertEquals(schema.fields()[5].dataType(), DataTypes.ShortType);
  assertEquals(schema.fields()[6].dataType(), DataTypes.IntegerType);
  assertEquals(schema.fields()[7].dataType(), DataTypes.LongType);
  assertEquals(schema.fields()[8].dataType(), DataTypes.BooleanType);
  assertEquals(schema.fields()[9].dataType(), DataTypes.BinaryType);
  assertEquals(schema.fields()[10].dataType(), DataTypes.DateType);
  assertEquals(schema.fields()[11].dataType(), DataTypes.TimestampType);
  assertEquals(schema.fields()[12].dataType(), new DecimalType(38, 38));
}
Example #15
Source File: AvroUtils.java From envelope with Apache License 2.0 | 4 votes |
private static Schema typeFor(DataType dataType, boolean isOptional, int recordCount) {
  LOG.trace("Converting {} to Avro, optional[{}]", dataType, isOptional);

  Schema typeSchema;
  SchemaBuilder.BaseTypeBuilder<Schema> typeBuilder = SchemaBuilder.builder();

  switch (dataType.typeName()) {
    case "binary":
      // bytes
      typeSchema = typeBuilder.bytesType();
      break;
    case "boolean":
      typeSchema = typeBuilder.booleanType();
      break;
    case "date":
      // int (logical)
      typeSchema = LogicalTypes.date().addToSchema(typeBuilder.intType());
      break;
    case "timestamp":
      // long (logical)
      typeSchema = LogicalTypes.timestampMillis().addToSchema(typeBuilder.longType());
      break;
    case "double":
      typeSchema = typeBuilder.doubleType();
      break;
    case "float":
      typeSchema = typeBuilder.floatType();
      break;
    case "integer":
    case "byte":
    case "short":
      typeSchema = typeBuilder.intType();
      break;
    case "long":
      typeSchema = typeBuilder.longType();
      break;
    case "null":
      typeSchema = typeBuilder.nullType();
      break;
    case "string":
      typeSchema = typeBuilder.stringType();
      break;
    case "array":
      ArrayType arrayType = (ArrayType) dataType;
      typeSchema = typeBuilder.array().items(typeFor(arrayType.elementType(), arrayType.containsNull(), recordCount));
      break;
    case "map":
      MapType mapType = (MapType) dataType;
      // Keys must be strings: mapType.keyType()
      typeSchema = typeBuilder.map().values(typeFor(mapType.valueType(), mapType.valueContainsNull(), recordCount));
      break;
    case "struct":
      StructType structType = (StructType) dataType;
      // Nested "anonymous" records
      typeSchema = schemaFor(structType, null, null, recordCount);
      break;
    default:
      if (dataType.typeName().startsWith("decimal")) {
        // byte (logical)
        DecimalType decimalType = (DecimalType) dataType;
        typeSchema = LogicalTypes.decimal(decimalType.precision(), decimalType.scale()).addToSchema(typeBuilder.bytesType());
      } else {
        throw new RuntimeException(String.format("DataType[%s] - DataType unrecognized or not yet implemented", dataType));
      }
  }

  if (isOptional && !typeSchema.getType().equals(NULL)) {
    return SchemaBuilder.builder().nullable().type(typeSchema);
  }

  return typeSchema;
}
Example #16
Source File: SchemaConverterTest.java From bunsen with Apache License 2.0 | 4 votes |
@Test
public void bigDecimalToDecimal() {
  Assert.assertTrue(
      getField(observationSchema, true, "valueQuantity", "value") instanceof DecimalType);
}
Example #17
Source File: DecimalColumnBlock.java From spliceengine with GNU Affero General Public License v3.0 | 4 votes |
public DecimalColumnBlock(ColumnVector columnVector, DataType type) {
  super(columnVector, type);
  decimalType = (DecimalType) type;
}