Java Code Examples for org.apache.spark.sql.types.DataTypes#BooleanType
The following examples show how to use org.apache.spark.sql.types.DataTypes#BooleanType.
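Before the project-specific excerpts, here is a minimal, self-contained sketch of the two patterns the examples keep repeating: declaring a BooleanType column in a StructType and checking a field's data type by reference equality. The class name, the column name "active", and the sample rows are illustrative only and are not taken from any of the projects below.

import java.util.Arrays;
import java.util.List;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

public class BooleanTypeSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .master("local[*]")
        .appName("boolean-type-sketch")
        .getOrCreate();

    // Declare a non-nullable boolean column (the name "active" is illustrative).
    StructType schema = new StructType(new StructField[] {
        new StructField("active", DataTypes.BooleanType, false, Metadata.empty())
    });

    List<Row> rows = Arrays.asList(RowFactory.create(true), RowFactory.create(false));
    Dataset<Row> ds = spark.createDataFrame(rows, schema);

    // DataTypes.BooleanType is a singleton, so reference equality is the usual check.
    boolean isBoolean = ds.schema().fields()[0].dataType() == DataTypes.BooleanType;
    System.out.println("first column is boolean: " + isBoolean);

    spark.stop();
  }
}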
Example 1
Source File: LocalWithSparkSessionTest.java From kylin-on-parquet-v2 with Apache License 2.0 | 6 votes |
private static DataType convertType(org.apache.kylin.metadata.datatype.DataType type) {
  if (type.isTimeFamily())
    return DataTypes.TimestampType;
  if (type.isDateTimeFamily())
    return DataTypes.DateType;
  if (type.isIntegerFamily())
    return DataTypes.LongType;
  if (type.isNumberFamily())
    return DataTypes.createDecimalType(19, 4);
  if (type.isStringFamily())
    return DataTypes.StringType;
  if (type.isBoolean())
    return DataTypes.BooleanType;
  throw new IllegalArgumentException("KAP data type: " + type + " can not be converted to spark's type.");
}
Example 2
Source File: TestDecisionStep.java From envelope with Apache License 2.0 | 6 votes |
@Test
public void testPruneByStepValueTrue() {
  StructType schema = new StructType(new StructField[] {
      new StructField("outcome", DataTypes.BooleanType, false, Metadata.empty())
  });
  List<Row> rows = Lists.newArrayList(
      RowFactory.create(true)
  );
  Dataset<Row> ds = Contexts.getSparkSession().createDataFrame(rows, schema);
  step1.setData(ds);

  Map<String, Object> step2ConfigMap = Maps.newHashMap();
  step2ConfigMap.put(Step.DEPENDENCIES_CONFIG, Lists.newArrayList("step1"));
  step2ConfigMap.put(DecisionStep.IF_TRUE_STEP_NAMES_PROPERTY, Lists.newArrayList("step3", "step7"));
  step2ConfigMap.put(DecisionStep.DECISION_METHOD_PROPERTY, DecisionStep.STEP_BY_VALUE_DECISION_METHOD);
  step2ConfigMap.put(DecisionStep.STEP_BY_VALUE_STEP_PROPERTY, "step1");
  Config step2Config = ConfigFactory.parseMap(step2ConfigMap);
  RefactorStep step2 = new DecisionStep("step2");
  step2.configure(step2Config);
  steps.add(step2);

  Set<Step> refactored = step2.refactor(steps);

  assertEquals(refactored, Sets.newHashSet(step1, step2, step3, step4, step7, step8));
}
Example 3
Source File: TestRowUtils.java From envelope with Apache License 2.0 | 6 votes |
@Test
public void testToRowValueBoolean() {
  DataType field = DataTypes.BooleanType;

  assertEquals("Invalid Boolean", true, RowUtils.toRowValue(true, field));
  assertEquals("Invalid 'true'", true, RowUtils.toRowValue("true", field));
  assertEquals("Invalid 'true'", true, RowUtils.toRowValue("TrUe", field));
  assertEquals("Invalid 'false'", false, RowUtils.toRowValue("false", field));
  assertEquals("Invalid 'false'", false, RowUtils.toRowValue("FaLsE", field));

  try {
    RowUtils.toRowValue(123, field);
    fail("Expected a RuntimeException for invalid type");
  } catch (RuntimeException e) {
    assertThat(e.getMessage(), CoreMatchers.containsString("Invalid or unrecognized input format"));
  }
}
Example 4
Source File: SchemaConverter.java From geowave with Apache License 2.0 | 6 votes |
private static AttributeDescriptor attrDescFromStructField(
    final AttributeTypeBuilder attrBuilder,
    final StructField field) {
  if (field.name().equals("geom")) {
    return attrBuilder.binding(Geometry.class).nillable(false).buildDescriptor("geom");
  }
  if (field.dataType() == DataTypes.StringType) {
    return attrBuilder.binding(String.class).buildDescriptor(field.name());
  } else if (field.dataType() == DataTypes.DoubleType) {
    return attrBuilder.binding(Double.class).buildDescriptor(field.name());
  } else if (field.dataType() == DataTypes.FloatType) {
    return attrBuilder.binding(Float.class).buildDescriptor(field.name());
  } else if (field.dataType() == DataTypes.LongType) {
    return attrBuilder.binding(Long.class).buildDescriptor(field.name());
  } else if (field.dataType() == DataTypes.IntegerType) {
    return attrBuilder.binding(Integer.class).buildDescriptor(field.name());
  } else if (field.dataType() == DataTypes.BooleanType) {
    return attrBuilder.binding(Boolean.class).buildDescriptor(field.name());
  } else if (field.dataType() == DataTypes.TimestampType) {
    return attrBuilder.binding(Date.class).buildDescriptor(field.name());
  }
  return null;
}
Example 5
Source File: TestDeletePlanner.java From envelope with Apache License 2.0 | 6 votes |
@Test
public void testPlanner() {
  List<Row> rows = Lists.newArrayList(RowFactory.create("a", 1, false), RowFactory.create("b", 2, true));
  StructType schema = new StructType(new StructField[] {
      new StructField("field1", DataTypes.StringType, false, null),
      new StructField("field2", DataTypes.IntegerType, false, null),
      new StructField("field3", DataTypes.BooleanType, false, null)
  });
  Dataset<Row> data = Contexts.getSparkSession().createDataFrame(rows, schema);

  BulkPlanner p = new DeletePlanner();
  p.configure(ConfigFactory.empty());

  List<Tuple2<MutationType, Dataset<Row>>> planned = p.planMutationsForSet(data);

  assertEquals(1, planned.size());
  assertEquals(MutationType.DELETE, planned.get(0)._1());
  assertEquals(data, planned.get(0)._2());
}
Example 6
Source File: SparkTempViewProvider.java From hudi with Apache License 2.0 | 6 votes |
private DataType getDataType(Comparable comparable) {
  if (comparable instanceof Integer) {
    return DataTypes.IntegerType;
  }
  if (comparable instanceof Double) {
    return DataTypes.DoubleType;
  }
  if (comparable instanceof Long) {
    return DataTypes.LongType;
  }
  if (comparable instanceof Boolean) {
    return DataTypes.BooleanType;
  }
  // TODO add additional types when needed. default to string
  return DataTypes.StringType;
}
Example 7
Source File: SchemaUtil.java From jpmml-evaluator-spark with GNU Affero General Public License v3.0 | 6 votes |
static public DataType translateDataType(org.dmg.pmml.DataType dataType){
  switch(dataType){
    case STRING:
      return DataTypes.StringType;
    case INTEGER:
      return DataTypes.IntegerType;
    case FLOAT:
      return DataTypes.FloatType;
    case DOUBLE:
      return DataTypes.DoubleType;
    case BOOLEAN:
      return DataTypes.BooleanType;
    default:
      throw new IllegalArgumentException();
  }
}
Example 8
Source File: TestDecisionStep.java From envelope with Apache License 2.0 | 6 votes |
@Test
public void testPruneByStepValueFalse() {
  StructType schema = new StructType(new StructField[] {
      new StructField("outcome", DataTypes.BooleanType, false, Metadata.empty())
  });
  List<Row> rows = Lists.newArrayList(
      RowFactory.create(false)
  );
  Dataset<Row> ds = Contexts.getSparkSession().createDataFrame(rows, schema);
  step1.setData(ds);

  Map<String, Object> step2ConfigMap = Maps.newHashMap();
  step2ConfigMap.put(Step.DEPENDENCIES_CONFIG, Lists.newArrayList("step1"));
  step2ConfigMap.put(DecisionStep.IF_TRUE_STEP_NAMES_PROPERTY, Lists.newArrayList("step3", "step7"));
  step2ConfigMap.put(DecisionStep.DECISION_METHOD_PROPERTY, DecisionStep.STEP_BY_VALUE_DECISION_METHOD);
  step2ConfigMap.put(DecisionStep.STEP_BY_VALUE_STEP_PROPERTY, "step1");
  Config step2Config = ConfigFactory.parseMap(step2ConfigMap);
  RefactorStep step2 = new DecisionStep("step2");
  step2.configure(step2Config);
  steps.add(step2);

  Set<Step> refactored = step2.refactor(steps);

  assertEquals(refactored, Sets.newHashSet(step1, step2, step5, step6));
}
Example 9
Source File: TypeCastStep.java From bpmn.ai with BSD 3-Clause "New" or "Revised" License | 6 votes |
private DataType mapDataType(List<StructField> datasetFields, String column, String typeConfig) {
  DataType currentDatatype = getCurrentDataType(datasetFields, column);

  // when typeConfig is null (no config for this column), return the current DataType
  if(typeConfig == null) {
    return currentDatatype;
  }

  switch (typeConfig) {
    case "integer":
      return DataTypes.IntegerType;
    case "long":
      return DataTypes.LongType;
    case "double":
      return DataTypes.DoubleType;
    case "boolean":
      return DataTypes.BooleanType;
    case "date":
      return DataTypes.DateType;
    case "timestamp":
      return DataTypes.TimestampType;
    default:
      return DataTypes.StringType;
  }
}
Example 10
Source File: FrameRDDConverterUtils.java From systemds with Apache License 2.0 | 5 votes |
/**
 * This function will convert Frame schema into DataFrame schema
 *
 * @param fschema frame schema
 * @param containsID true if contains ID column
 * @return Spark StructType of StructFields representing schema
 */
public static StructType convertFrameSchemaToDFSchema(ValueType[] fschema, boolean containsID) {
  // generate the schema based on the string of schema
  List<StructField> fields = new ArrayList<>();

  // add id column type
  if( containsID )
    fields.add(DataTypes.createStructField(RDDConverterUtils.DF_ID_COLUMN, DataTypes.DoubleType, true));

  // add remaining types
  int col = 1;
  for (ValueType schema : fschema) {
    DataType dt = null;
    switch(schema) {
      case STRING:  dt = DataTypes.StringType; break;
      case FP64:    dt = DataTypes.DoubleType; break;
      case INT64:   dt = DataTypes.LongType; break;
      case BOOLEAN: dt = DataTypes.BooleanType; break;
      default:
        dt = DataTypes.StringType;
        LOG.warn("Using default type String for " + schema.toString());
    }
    fields.add(DataTypes.createStructField("C"+col++, dt, true));
  }

  return DataTypes.createStructType(fields);
}
Example 11
Source File: DecisionStep.java From envelope with Apache License 2.0 | 5 votes |
private boolean evaluateStepByKeyDecision(Set<Step> steps) {
  Optional<Step> optionalStep = StepUtils.getStepForName(stepByKeyStepName, steps);

  if (!optionalStep.isPresent()) {
    throw new RuntimeException("Unknown decision step's key step: " + stepByValueStepName);
  }

  if (!(optionalStep.get() instanceof DataStep)) {
    throw new RuntimeException("Decision step's key step is not a data step: " + optionalStep.get().getName());
  }

  Dataset<Row> keyDataset = ((DataStep)optionalStep.get()).getData();

  if (keyDataset.schema().fields().length != 2 ||
      keyDataset.schema().fields()[0].dataType() != DataTypes.StringType ||
      keyDataset.schema().fields()[1].dataType() != DataTypes.BooleanType) {
    throw new RuntimeException("Decision step's key step must contain a string column and then a boolean column");
  }

  String keyColumnName = keyDataset.schema().fieldNames()[0];
  String whereClause = keyColumnName + " = '" + stepByKeyKey + "'";
  Dataset<Row> decisionDataset = keyDataset.where(whereClause);

  if (decisionDataset.count() != 1) {
    throw new RuntimeException("Decision step's key step must contain a single record for the given key");
  }

  boolean decision = decisionDataset.collectAsList().get(0).getBoolean(1);

  return decision;
}
Example 12
Source File: SchemaConverter.java From geowave with Apache License 2.0 | 5 votes |
private static SimpleFeatureDataType attrDescToDataType(final AttributeDescriptor attrDesc) {
  boolean isGeom = false;
  DataType dataTypeOut = DataTypes.NullType;

  if (attrDesc.getType().getBinding().equals(String.class)) {
    dataTypeOut = DataTypes.StringType;
  } else if (attrDesc.getType().getBinding().equals(Double.class)) {
    dataTypeOut = DataTypes.DoubleType;
  } else if (attrDesc.getType().getBinding().equals(Float.class)) {
    dataTypeOut = DataTypes.FloatType;
  } else if (attrDesc.getType().getBinding().equals(Long.class)) {
    dataTypeOut = DataTypes.LongType;
  } else if (attrDesc.getType().getBinding().equals(Integer.class)) {
    dataTypeOut = DataTypes.IntegerType;
  } else if (attrDesc.getType().getBinding().equals(Boolean.class)) {
    dataTypeOut = DataTypes.BooleanType;
  } else if (attrDesc.getType().getBinding().equals(Date.class)) {
    dataTypeOut = DataTypes.TimestampType;
  }
  // Custom geometry types get WKB encoding
  else if (Geometry.class.isAssignableFrom(attrDesc.getType().getBinding())) {
    dataTypeOut = GeoWaveSpatialEncoders.geometryUDT;
    isGeom = true;
  }

  return new SimpleFeatureDataType(dataTypeOut, isGeom);
}
Example 13
Source File: TestDecisionStep.java From envelope with Apache License 2.0 | 5 votes |
@Test
public void testPruneByStepKeyTrue() {
  StructType schema = new StructType(new StructField[] {
      new StructField("name", DataTypes.StringType, false, Metadata.empty()),
      new StructField("result", DataTypes.BooleanType, false, Metadata.empty())
  });
  List<Row> rows = Lists.newArrayList(
      RowFactory.create("namecheck", false),
      RowFactory.create("agerange", true)
  );
  Dataset<Row> ds = Contexts.getSparkSession().createDataFrame(rows, schema);
  step1.setData(ds);

  Map<String, Object> step2ConfigMap = Maps.newHashMap();
  step2ConfigMap.put(Step.DEPENDENCIES_CONFIG, Lists.newArrayList("step1"));
  step2ConfigMap.put(DecisionStep.IF_TRUE_STEP_NAMES_PROPERTY, Lists.newArrayList("step3", "step7"));
  step2ConfigMap.put(DecisionStep.DECISION_METHOD_PROPERTY, DecisionStep.STEP_BY_KEY_DECISION_METHOD);
  step2ConfigMap.put(DecisionStep.STEP_BY_KEY_STEP_PROPERTY, "step1");
  step2ConfigMap.put(DecisionStep.STEP_BY_KEY_KEY_PROPERTY, "agerange");
  Config step2Config = ConfigFactory.parseMap(step2ConfigMap);
  RefactorStep step2 = new DecisionStep("step2");
  step2.configure(step2Config);
  steps.add(step2);

  Set<Step> refactored = step2.refactor(steps);

  assertEquals(refactored, Sets.newHashSet(step1, step2, step3, step4, step7, step8));
}
Example 14
Source File: TestDecisionStep.java From envelope with Apache License 2.0 | 5 votes |
@Test
public void testPruneByStepKeyFalse() {
  StructType schema = new StructType(new StructField[] {
      new StructField("name", DataTypes.StringType, false, Metadata.empty()),
      new StructField("result", DataTypes.BooleanType, false, Metadata.empty())
  });
  List<Row> rows = Lists.newArrayList(
      RowFactory.create("namecheck", false),
      RowFactory.create("agerange", true)
  );
  Dataset<Row> ds = Contexts.getSparkSession().createDataFrame(rows, schema);
  step1.setData(ds);

  Map<String, Object> step2ConfigMap = Maps.newHashMap();
  step2ConfigMap.put(Step.DEPENDENCIES_CONFIG, Lists.newArrayList("step1"));
  step2ConfigMap.put(DecisionStep.IF_TRUE_STEP_NAMES_PROPERTY, Lists.newArrayList("step3", "step7"));
  step2ConfigMap.put(DecisionStep.DECISION_METHOD_PROPERTY, DecisionStep.STEP_BY_KEY_DECISION_METHOD);
  step2ConfigMap.put(DecisionStep.STEP_BY_KEY_STEP_PROPERTY, "step1");
  step2ConfigMap.put(DecisionStep.STEP_BY_KEY_KEY_PROPERTY, "namecheck");
  Config step2Config = ConfigFactory.parseMap(step2ConfigMap);
  RefactorStep step2 = new DecisionStep("step2");
  step2.configure(step2Config);
  steps.add(step2);

  Set<Step> refactored = step2.refactor(steps);

  assertEquals(refactored, Sets.newHashSet(step1, step2, step5, step6));
}
Example 15
Source File: SparkSturctTypeUtil.java From waterdrop with Apache License 2.0 | 4 votes |
private static DataType getType(String type) {
  DataType dataType = DataTypes.NullType;
  switch (type.toLowerCase()) {
    case "string":
      dataType = DataTypes.StringType;
      break;
    case "integer":
      dataType = DataTypes.IntegerType;
      break;
    case "long":
      dataType = DataTypes.LongType;
      break;
    case "double":
      dataType = DataTypes.DoubleType;
      break;
    case "float":
      dataType = DataTypes.FloatType;
      break;
    case "short":
      dataType = DataTypes.ShortType;
      break;
    case "date":
      dataType = DataTypes.DateType;
      break;
    case "timestamp":
      dataType = DataTypes.TimestampType;
      break;
    case "boolean":
      dataType = DataTypes.BooleanType;
      break;
    case "binary":
      dataType = DataTypes.BinaryType;
      break;
    case "byte":
      dataType = DataTypes.ByteType;
      break;
    default:
      throw new ConfigRuntimeException("Throw data type exception, unknown type: " + type);
  }
  return dataType;
}
Example 16
Source File: KuduOutput.java From envelope with Apache License 2.0 | 4 votes |
private StructType schemaFor(KuduTable table) {
  List<StructField> fields = Lists.newArrayList();

  for (ColumnSchema columnSchema : table.getSchema().getColumns()) {
    DataType fieldType;

    switch (columnSchema.getType()) {
      case DOUBLE:
        fieldType = DataTypes.DoubleType;
        break;
      case FLOAT:
        fieldType = DataTypes.FloatType;
        break;
      case INT8:
        fieldType = DataTypes.ByteType;
        break;
      case INT16:
        fieldType = DataTypes.ShortType;
        break;
      case INT32:
        fieldType = DataTypes.IntegerType;
        break;
      case INT64:
        fieldType = DataTypes.LongType;
        break;
      case STRING:
        fieldType = DataTypes.StringType;
        break;
      case BOOL:
        fieldType = DataTypes.BooleanType;
        break;
      case BINARY:
        fieldType = DataTypes.BinaryType;
        break;
      case UNIXTIME_MICROS:
        fieldType = DataTypes.TimestampType;
        break;
      case DECIMAL:
        int precision = columnSchema.getTypeAttributes().getPrecision();
        int scale = columnSchema.getTypeAttributes().getScale();
        fieldType = DataTypes.createDecimalType(precision, scale);
        break;
      default:
        throw new RuntimeException("Unsupported Kudu column type: " + columnSchema.getType());
    }

    fields.add(DataTypes.createStructField(columnSchema.getName(), fieldType, true));
  }

  return DataTypes.createStructType(fields);
}
Example 17
Source File: AvroUtils.java From envelope with Apache License 2.0 | 4 votes |
/**
 * Convert Avro Types into their associated DataType.
 *
 * @param schemaType Avro Schema.Type
 * @return DataType representation
 */
public static DataType dataTypeFor(Schema schemaType) {
  LOG.trace("Converting Schema[{}] to DataType", schemaType);

  // Unwrap "optional" unions to the base type
  boolean isOptional = isNullable(schemaType);

  if (isOptional) {
    // if only 2 items in the union, then "unwrap," otherwise, it's a full union and should be rendered as such
    if (schemaType.getTypes().size() == 2) {
      LOG.trace("Unwrapping simple 'optional' union for {}", schemaType);
      for (Schema s : schemaType.getTypes()) {
        if (s.getType().equals(NULL)) {
          continue;
        }
        // Unwrap
        schemaType = s;
        break;
      }
    }
  }

  // Convert supported LogicalTypes
  if (null != schemaType.getLogicalType()) {
    LogicalType logicalType = schemaType.getLogicalType();
    switch (logicalType.getName()) {
      case "date" :
        return DataTypes.DateType;
      case "timestamp-millis" :
        return DataTypes.TimestampType;
      case "decimal" :
        LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) logicalType;
        return DataTypes.createDecimalType(decimal.getPrecision(), decimal.getScale());
      default:
        // Pass-thru
        LOG.warn("Unsupported LogicalType[{}], continuing with underlying base type", logicalType.getName());
    }
  }

  switch (schemaType.getType()) {
    case RECORD:
      // StructType
      List<StructField> structFieldList = Lists.newArrayListWithCapacity(schemaType.getFields().size());
      for (Field f : schemaType.getFields()) {
        structFieldList.add(DataTypes.createStructField(f.name(), dataTypeFor(f.schema()), isNullable(f.schema())));
      }
      return DataTypes.createStructType(structFieldList);
    case ARRAY:
      Schema elementType = schemaType.getElementType();
      return DataTypes.createArrayType(dataTypeFor(elementType), isNullable(elementType));
    case MAP:
      Schema valueType = schemaType.getValueType();
      return DataTypes.createMapType(DataTypes.StringType, dataTypeFor(valueType), isNullable(valueType));
    case UNION:
      // StructType of members
      List<StructField> unionFieldList = Lists.newArrayListWithCapacity(schemaType.getTypes().size());
      int m = 0;
      for (Schema u : schemaType.getTypes()) {
        unionFieldList.add(DataTypes.createStructField("member" + m++, dataTypeFor(u), isNullable(u)));
      }
      return DataTypes.createStructType(unionFieldList);
    case FIXED:
    case BYTES:
      return DataTypes.BinaryType;
    case ENUM:
    case STRING:
      return DataTypes.StringType;
    case INT:
      return DataTypes.IntegerType;
    case LONG:
      return DataTypes.LongType;
    case FLOAT:
      return DataTypes.FloatType;
    case DOUBLE:
      return DataTypes.DoubleType;
    case BOOLEAN:
      return DataTypes.BooleanType;
    case NULL:
      return DataTypes.NullType;
    default:
      throw new RuntimeException(String.format("Unrecognized or unsupported Avro Type conversion: %s", schemaType));
  }
}
Example 18
Source File: ConfigurationDataTypes.java From envelope with Apache License 2.0 | 4 votes |
public static DataType getSparkDataType(String typeString) {
  DataType type;

  String prec_scale_regex_groups = "\\s*(decimal)\\s*\\(\\s*(\\d+)\\s*,\\s*(\\d+)\\s*\\)\\s*";
  Pattern prec_scale_regex_pattern = Pattern.compile(prec_scale_regex_groups);
  Matcher prec_scale_regex_matcher = prec_scale_regex_pattern.matcher(typeString);

  if (prec_scale_regex_matcher.matches()) {
    int precision = Integer.parseInt(prec_scale_regex_matcher.group(2));
    int scale = Integer.parseInt(prec_scale_regex_matcher.group(3));
    type = DataTypes.createDecimalType(precision, scale);
  } else {
    switch (typeString) {
      case DECIMAL:
        type = DataTypes.createDecimalType();
        break;
      case STRING:
        type = DataTypes.StringType;
        break;
      case FLOAT:
        type = DataTypes.FloatType;
        break;
      case DOUBLE:
        type = DataTypes.DoubleType;
        break;
      case BYTE:
        type = DataTypes.ByteType;
        break;
      case SHORT:
        type = DataTypes.ShortType;
        break;
      case INT:
        type = DataTypes.IntegerType;
        break;
      case LONG:
        type = DataTypes.LongType;
        break;
      case BOOLEAN:
        type = DataTypes.BooleanType;
        break;
      case BINARY:
        type = DataTypes.BinaryType;
        break;
      case DATE:
        type = DataTypes.DateType;
        break;
      case TIMESTAMP:
        type = DataTypes.TimestampType;
        break;
      default:
        throw new RuntimeException("Unsupported or unrecognized field type: " + typeString);
    }
  }

  return type;
}
Example 19
Source File: FlightDataSourceReader.java From flight-spark-source with Apache License 2.0 | 4 votes |
private DataType sparkFromArrow(FieldType fieldType) {
  switch (fieldType.getType().getTypeID()) {
    case Null:
      return DataTypes.NullType;
    case Struct:
      throw new UnsupportedOperationException("have not implemented Struct type yet");
    case List:
      throw new UnsupportedOperationException("have not implemented List type yet");
    case FixedSizeList:
      throw new UnsupportedOperationException("have not implemented FixedSizeList type yet");
    case Union:
      throw new UnsupportedOperationException("have not implemented Union type yet");
    case Int:
      ArrowType.Int intType = (ArrowType.Int) fieldType.getType();
      int bitWidth = intType.getBitWidth();
      if (bitWidth == 8) {
        return DataTypes.ByteType;
      } else if (bitWidth == 16) {
        return DataTypes.ShortType;
      } else if (bitWidth == 32) {
        return DataTypes.IntegerType;
      } else if (bitWidth == 64) {
        return DataTypes.LongType;
      }
      throw new UnsupportedOperationException("unknown int type with bitwidth " + bitWidth);
    case FloatingPoint:
      ArrowType.FloatingPoint floatType = (ArrowType.FloatingPoint) fieldType.getType();
      FloatingPointPrecision precision = floatType.getPrecision();
      switch (precision) {
        case HALF:
        case SINGLE:
          return DataTypes.FloatType;
        case DOUBLE:
          return DataTypes.DoubleType;
      }
    case Utf8:
      return DataTypes.StringType;
    case Binary:
    case FixedSizeBinary:
      return DataTypes.BinaryType;
    case Bool:
      return DataTypes.BooleanType;
    case Decimal:
      throw new UnsupportedOperationException("have not implemented Decimal type yet");
    case Date:
      return DataTypes.DateType;
    case Time:
      return DataTypes.TimestampType; //note i don't know what this will do!
    case Timestamp:
      return DataTypes.TimestampType;
    case Interval:
      return DataTypes.CalendarIntervalType;
    case NONE:
      return DataTypes.NullType;
  }
  throw new IllegalStateException("Unexpected value: " + fieldType);
}