org.apache.spark.sql.types.DecimalType Scala Examples
The following examples show how to use org.apache.spark.sql.types.DecimalType.
The original project and source file are noted above each example.
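Before the examples, here is a minimal sketch of what DecimalType itself represents: a fixed-precision decimal column type parameterized by precision (total number of digits) and scale (digits to the right of the decimal point). The column names in the schema are purely illustrative.

import org.apache.spark.sql.types._

// DecimalType(precision, scale): precision is the total number of digits,
// scale is the number of digits to the right of the decimal point.
val amount = DecimalType(10, 2) // values up to 99999999.99

val schema = StructType(Seq(
  StructField("id", LongType, nullable = false),
  StructField("amount", amount, nullable = true)))

println(amount.precision) // 10
println(amount.scale)     // 2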
Example 1
Source File: TypeQualifiersSuite.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi.schema

import org.apache.hive.service.cli.thrift.TCLIServiceConstants
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{BooleanType, DecimalType}

import yaooqinn.kyuubi.utils.ReflectUtils

class TypeQualifiersSuite extends SparkFunSuite {

  test("type qualifier basic tests") {
    val typeQualifiers1 = TypeQualifiers.fromTypeInfo(new DecimalType(10, 9))
    val typeQualifiers2 = TypeQualifiers.fromTypeInfo(BooleanType)

    assert(ReflectUtils.getFieldValue(typeQualifiers1, "precision") === Some(10))
    assert(ReflectUtils.getFieldValue(typeQualifiers1, "scale") === Some(9))
    assert(ReflectUtils.getFieldValue(typeQualifiers2, "precision") === None)
    assert(ReflectUtils.getFieldValue(typeQualifiers2, "scale") === None)

    assert(typeQualifiers1.toTTypeQualifiers
      .getQualifiers.get(TCLIServiceConstants.PRECISION).getI32Value === 10)
    assert(typeQualifiers1.toTTypeQualifiers
      .getQualifiers.get(TCLIServiceConstants.SCALE).getI32Value === 9)
    assert(!typeQualifiers1.toTTypeQualifiers
      .getQualifiers.containsKey(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH))

    assert(typeQualifiers2.toTTypeQualifiers.getQualifiers.isEmpty)
    assert(!typeQualifiers2.toTTypeQualifiers
      .getQualifiers.containsKey(TCLIServiceConstants.PRECISION))
    assert(!typeQualifiers2.toTTypeQualifiers
      .getQualifiers.containsKey(TCLIServiceConstants.SCALE))

    ReflectUtils.invokeMethod(
      typeQualifiers2,
      "yaooqinn$kyuubi$schema$TypeQualifiers$$setPrecision",
      Seq(classOf[Int]),
      Seq(Integer.valueOf(8)))
    ReflectUtils.invokeMethod(
      typeQualifiers2,
      "yaooqinn$kyuubi$schema$TypeQualifiers$$setScale",
      Seq(classOf[Int]),
      Seq(Integer.valueOf(8)))

    assert(typeQualifiers2.toTTypeQualifiers
      .getQualifiers.get(TCLIServiceConstants.PRECISION).getI32Value === 8)
    assert(typeQualifiers2.toTTypeQualifiers
      .getQualifiers.get(TCLIServiceConstants.SCALE).getI32Value === 8)
  }
}
Example 2
Source File: tbl_demo_test.scala From huemul-bigdatagovernance with Apache License 2.0
package samples

import com.huemulsolutions.bigdata.common._
import com.huemulsolutions.bigdata.control._
import com.huemulsolutions.bigdata.tables.huemul_Table
import com.huemulsolutions.bigdata.tables.huemul_Columns
import com.huemulsolutions.bigdata.tables.huemulType_StorageType
import com.huemulsolutions.bigdata.tables.huemulType_Tables
import com.huemulsolutions.bigdata.tables.huemulType_SecurityLevel
import org.apache.spark.sql.types.DataTypes._
import org.apache.spark.sql.types.DecimalType
import org.apache.spark.sql.types.Decimal

class tbl_demo_test(huemulBigDataGov: huemul_BigDataGovernance, Control: huemul_Control)
  extends huemul_Table(huemulBigDataGov, Control) with Serializable {

  this.setAutoCast(true)
  this.setBusiness_ResponsibleName("Nombre 1")
  this.setDataBase(huemulBigDataGov.GlobalSettings.DIM_DataBase)
  this.setDescription("descripcion")
  this.setDQ_MaxNewRecords_Num(10)
  this.setDQ_MaxNewRecords_Perc(Decimal.apply(0.30))
  this.setGlobalPaths(huemulBigDataGov.GlobalSettings.DIM_BigFiles_Path)
  this.setIT_ResponsibleName("IT Responsible")
  this.setLocalPath("demo/")
  this.setFrequency(huemulType_Frequency.MONTHLY)
  //this.setPartitionField("periodo_id")
  this.setStorageType(huemulType_StorageType.ORC)
  this.setTableType(huemulType_Tables.Reference)

  this.WhoCanRun_executeFull_addAccess("classname", "package")
  this.WhoCanRun_executeOnlyInsert_addAccess("classname", "package")
  this.WhoCanRun_executeOnlyUpdate_addAccess("classname", "package")

  val codigo_id: huemul_Columns = new huemul_Columns(StringType, true, "descripción del campo")
  codigo_id.setIsPK(true)
  codigo_id.setIsUnique(true)
  codigo_id.setDQ_MaxDateTimeValue("")
  codigo_id.setDQ_MinDateTimeValue("")
  codigo_id.setDQ_MaxDecimalValue(Decimal.apply(10))
  codigo_id.setDQ_MinDecimalValue(Decimal.apply(10))
  codigo_id.setDQ_MaxLen(10)
  codigo_id.setDQ_MinLen(9)
  codigo_id.setNullable(true)
  codigo_id.setDefaultValue("'nada'")
  codigo_id.setSecurityLevel(huemulType_SecurityLevel.Public)
  codigo_id.setEncryptedType("sin encriptar")
  codigo_id.setMDM_EnableOldValue(false)
  codigo_id.setMDM_EnableDTLog(false)
  codigo_id.setMDM_EnableProcessLog(false)

  this.ApplyTableDefinition()
}
Example 3
Source File: tbl_demo_test_padre.scala From huemul-bigdatagovernance with Apache License 2.0
package samples

import com.huemulsolutions.bigdata.common._
import com.huemulsolutions.bigdata.control._
import com.huemulsolutions.bigdata.tables.huemul_Table
import com.huemulsolutions.bigdata.tables.huemul_Columns
import com.huemulsolutions.bigdata.tables.huemulType_StorageType
import com.huemulsolutions.bigdata.tables.huemulType_Tables
import com.huemulsolutions.bigdata.tables.huemulType_SecurityLevel
import org.apache.spark.sql.types.DataTypes._
import org.apache.spark.sql.types.DecimalType
import org.apache.spark.sql.types.Decimal
import com.huemulsolutions.bigdata.tables.huemul_Table_Relationship
import javax.naming.ldap.Control

class tbl_demo_test_padre(huemulBigDataGov: huemul_BigDataGovernance, Control: huemul_Control)
  extends huemul_Table(huemulBigDataGov, Control) with Serializable {

  this.setAutoCast(true)
  this.setBusiness_ResponsibleName("Nombre 1")
  this.setDataBase(huemulBigDataGov.GlobalSettings.DIM_DataBase)
  this.setDescription("descripcion")
  this.setFrequency(huemulType_Frequency.MONTHLY)
  this.setDQ_MaxNewRecords_Num(10)
  this.setDQ_MaxNewRecords_Perc(Decimal.apply(0.20))
  this.setGlobalPaths(huemulBigDataGov.GlobalSettings.DIM_BigFiles_Path)
  this.setIT_ResponsibleName("IT Responsible")
  this.setLocalPath("demo/")
  //this.setPartitionField("periodo_id")
  this.setStorageType(huemulType_StorageType.ORC)
  this.setTableType(huemulType_Tables.Reference)

  this.WhoCanRun_executeFull_addAccess("classname", "package")
  this.WhoCanRun_executeOnlyInsert_addAccess("classname", "package")
  this.WhoCanRun_executeOnlyUpdate_addAccess("classname", "package")

  val miClave_id: huemul_Columns = new huemul_Columns(StringType, true, "descripción del campo")
  miClave_id.setIsPK(true)
  miClave_id.setIsUnique(true)
  miClave_id.setDQ_MaxDateTimeValue("")
  miClave_id.setDQ_MinDateTimeValue("")
  miClave_id.setDQ_MaxDecimalValue(Decimal.apply(10))
  miClave_id.setDQ_MinDecimalValue(Decimal.apply(10))
  miClave_id.setDQ_MaxLen(10)
  miClave_id.setDQ_MinLen(9)
  miClave_id.setNullable(true)
  miClave_id.setDefaultValue("'nada'")
  miClave_id.setSecurityLevel(huemulType_SecurityLevel.Public)
  miClave_id.setEncryptedType("nada")
  miClave_id.setMDM_EnableOldValue(false)
  miClave_id.setMDM_EnableDTLog(false)
  miClave_id.setMDM_EnableProcessLog(false)

  val codigo_id_aca: huemul_Columns = new huemul_Columns(StringType, true, "descripción del campo fk")

  val instancia_tbl_demo_test = new tbl_demo_test(huemulBigDataGov, Control)
  val FK_Rel = new huemul_Table_Relationship(instancia_tbl_demo_test, false)
  FK_Rel.AddRelationship(instancia_tbl_demo_test.codigo_id, this.codigo_id_aca)

  this.ApplyTableDefinition()
}
Example 4
Source File: ColumnMetadataTest.scala From spark-vector with Apache License 2.0
package com.actian.spark_vector.vector

import java.util.regex.Pattern

import org.apache.spark.sql.types.DecimalType
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalacheck.Gen.{choose, identifier}
import org.scalacheck.Prop.{forAll, propBoolean}
import org.scalatest.{FunSuite, Matchers}

import com.actian.spark_vector.test.tags.RandomizedTest

class ColumnMetadataTest extends FunSuite with Matchers {

  // Generate random column metadata and ensure the resultant StructField's are valid
  test("generated", RandomizedTest) {
    forAll(columnMetadataGen)(colMD => {
      assertColumnMetadata(colMD)
    }).check
  }

  val milliSecsPattern = Pattern.compile(".*\\.(S*)")

  def assertColumnMetadata(columnMD: ColumnMetadata): Boolean = {
    val structField = columnMD.structField
    structField.dataType match {
      // For decimal type, ensure the scale and precision match
      case decType: DecimalType =>
        decType.precision should be(columnMD.precision)
        decType.scale should be(columnMD.scale)
      case _ =>
    }
    true
  }

  val columnMetadataGen: Gen[ColumnMetadata] =
    for {
      name <- identifier
      typeName <- VectorTypeGen.vectorJdbcTypeGen
      nullable <- arbitrary[Boolean]
      precision <- choose(0, 20)
      scale <- choose(0, Math.min(20, precision))
    } yield ColumnMetadata(name, typeName, nullable, precision, scale)
}
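The pattern match in assertColumnMetadata is the idiomatic way to recover precision and scale from a column's data type. A minimal standalone sketch of that pattern (the field name is made up for illustration):

import org.apache.spark.sql.types.{DecimalType, StructField}

// A hypothetical decimal column with precision 12 and scale 2.
val field = StructField("price", DecimalType(12, 2))

field.dataType match {
  case d: DecimalType => println(s"precision=${d.precision}, scale=${d.scale}") // precision=12, scale=2
  case other          => println(s"not a decimal column: $other")
}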
Example 5
Source File: DecimalExpressionSuite.scala From BigDatalog with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{LongType, DecimalType, Decimal}

class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("UnscaledValue") {
    val d1 = Decimal("10.1")
    checkEvaluation(UnscaledValue(Literal(d1)), 101L)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(UnscaledValue(Literal(d2)), 101L)
    checkEvaluation(UnscaledValue(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("MakeDecimal") {
    checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1"))
    checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null)
  }

  test("PromotePrecision") {
    val d1 = Decimal("10.1")
    checkEvaluation(PromotePrecision(Literal(d1)), d1)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(PromotePrecision(Literal(d2)), d2)
    checkEvaluation(PromotePrecision(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("CheckOverflow") {
    val d1 = Decimal("10.1")
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 1)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 2)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 3)), null)

    val d2 = Decimal(101, 3, 1)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 1)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 2)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 3)), null)

    checkEvaluation(CheckOverflow(Literal.create(null, DecimalType(2, 1)), DecimalType(3, 2)), null)
  }
}
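This suite relies on the fact that a Spark Decimal is stored as an unscaled integer plus a scale: 10.1 at scale 1 has the unscaled value 101, which is why UnscaledValue evaluates to 101L. A small sketch of that relationship, assuming only Spark's Decimal class on the classpath:

import org.apache.spark.sql.types.Decimal

val fromString   = Decimal("10.1")    // parsed literal: precision 3, scale 1
val fromUnscaled = Decimal(101, 3, 1) // unscaled value 101 with precision 3 and scale 1

// Both represent the same number; UnscaledValue above would yield 101L for either.
assert(fromString.toUnscaledLong == 101L)
assert(fromUnscaled.toUnscaledLong == 101L)
assert(fromString.toDouble == 10.1)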
Example 6
Source File: DecimalExpressionSuite.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{Decimal, DecimalType, LongType}

class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("UnscaledValue") {
    val d1 = Decimal("10.1")
    checkEvaluation(UnscaledValue(Literal(d1)), 101L)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(UnscaledValue(Literal(d2)), 101L)
    checkEvaluation(UnscaledValue(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("MakeDecimal") {
    checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1"))
    checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null)
  }

  test("PromotePrecision") {
    val d1 = Decimal("10.1")
    checkEvaluation(PromotePrecision(Literal(d1)), d1)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(PromotePrecision(Literal(d2)), d2)
    checkEvaluation(PromotePrecision(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("CheckOverflow") {
    val d1 = Decimal("10.1")
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 1)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 2)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 3)), null)

    val d2 = Decimal(101, 3, 1)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 1)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 2)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 3)), null)

    checkEvaluation(CheckOverflow(Literal.create(null, DecimalType(2, 1)), DecimalType(3, 2)), null)
  }
}
Example 7
Source File: DecimalExpressionSuite.scala From spark1.52 with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{LongType, DecimalType, Decimal}

class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  // unscaled value
  test("UnscaledValue") {
    val d1 = Decimal("10.1")
    checkEvaluation(UnscaledValue(Literal(d1)), 101L)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(UnscaledValue(Literal(d2)), 101L)
    checkEvaluation(UnscaledValue(Literal.create(null, DecimalType(2, 1))), null)
  }

  // make a decimal
  test("MakeDecimal") {
    checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1"))
    checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null)
  }

  // promote precision
  test("PromotePrecision") {
    val d1 = Decimal("10.1")
    checkEvaluation(PromotePrecision(Literal(d1)), d1)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(PromotePrecision(Literal(d2)), d2)
    checkEvaluation(PromotePrecision(Literal.create(null, DecimalType(2, 1))), null)
  }

  // check overflow
  test("CheckOverflow") {
    val d1 = Decimal("10.1")
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 1)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 2)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 3)), null)

    val d2 = Decimal(101, 3, 1)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 1)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 2)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 3)), null)

    checkEvaluation(CheckOverflow(Literal.create(null, DecimalType(2, 1)), DecimalType(3, 2)), null)
  }
}
Example 8
Source File: GenerateMutableProjection.scala From spark1.52 with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions.codegen

import scala.collection.mutable.ArrayBuffer

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.NoOp
import org.apache.spark.sql.types.DecimalType

// MutableProjection is not accessible in Java
abstract class BaseMutableProjection extends MutableProjection

// (Excerpt truncated by the listing: the lines below are the tail of the generated
// Java source string built inside GenerateMutableProjection.create and the code
// that compiles that string into a MutableProjection.)
      public InternalRow currentValue() {
        return (InternalRow) mutableRow;
      }

      public Object apply(Object _i) {
        InternalRow i = (InternalRow) _i;
        $allProjections
        return mutableRow;
      }
    }
    """

    logDebug(s"code for ${expressions.mkString(",")}:\n${CodeFormatter.format(code)}")

    val c = compile(code)
    () => {
      c.generate(ctx.references.toArray).asInstanceOf[MutableProjection]
    }
  }
}
Example 9
Source File: ColumnDescriptorSuite.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi.schema

import org.apache.hive.service.cli.thrift.{TCLIServiceConstants, TTypeId}
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{DecimalType, StringType, StructType}

class ColumnDescriptorSuite extends SparkFunSuite {

  test("Column Descriptor basic test") {
    val col1 = "a"
    val col2 = "b"
    val comments = "no comments"
    val schema = new StructType()
      .add(col1, StringType, nullable = true, comments)
      .add(col2, DecimalType(10, 9), nullable = true, "")

    val tColumnDescs = (0 until schema.length).map(i => ColumnDescriptor(schema(i), i)).map(_.toTColumnDesc)

    assert(tColumnDescs.head.getColumnName === col1)
    assert(tColumnDescs.head.getComment === comments)
    assert(tColumnDescs.head.getPosition === 0)
    assert(tColumnDescs.head.getTypeDesc === TypeDescriptor(StringType).toTTypeDesc)
    assert(tColumnDescs.head.getTypeDesc.getTypesSize === 1)
    assert(tColumnDescs.head.getTypeDesc.getTypes.get(0)
      .getPrimitiveEntry.getTypeQualifiers === null)
    assert(tColumnDescs.head.getTypeDesc.getTypes.get(0)
      .getPrimitiveEntry.getType === TTypeId.STRING_TYPE)

    assert(tColumnDescs(1).getColumnName === col2)
    assert(tColumnDescs(1).getComment === "")
    assert(tColumnDescs(1).getPosition === 1)
    assert(tColumnDescs(1).getTypeDesc.getTypesSize === 1)
    assert(tColumnDescs(1)
      .getTypeDesc
      .getTypes.get(0)
      .getPrimitiveEntry
      .getTypeQualifiers
      .getQualifiers
      .get(TCLIServiceConstants.PRECISION).getI32Value === 10)
    assert(tColumnDescs(1)
      .getTypeDesc
      .getTypes.get(0)
      .getPrimitiveEntry
      .getTypeQualifiers
      .getQualifiers
      .get(TCLIServiceConstants.SCALE).getI32Value === 9)
    assert(tColumnDescs(1).getTypeDesc.getTypes.get(0)
      .getPrimitiveEntry.getType === TTypeId.DECIMAL_TYPE)
  }

  test("field is null") {
    val tColumnDesc = ColumnDescriptor(null, 0).toTColumnDesc
    assert(tColumnDesc.isSetPosition)
    assert(!tColumnDesc.isSetColumnName)
    assert(!tColumnDesc.isSetTypeDesc)
    assert(!tColumnDesc.isSetComment)
  }
}
Example 10
Source File: TypeDescriptorSuite.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi.schema

import org.apache.hive.service.cli.thrift.{TCLIServiceConstants, TTypeId}
import org.apache.spark.{MyDataType, SparkFunSuite}
import org.apache.spark.sql.types.{ByteType, DecimalType}

import yaooqinn.kyuubi.utils.ReflectUtils

class TypeDescriptorSuite extends SparkFunSuite {

  test("TypeDescriptor basic tests") {
    val typeDescriptor = TypeDescriptor(new DecimalType(10, 9))
    val tTypeDesc = typeDescriptor.toTTypeDesc
    assert(tTypeDesc.getTypesSize === 1)
    assert(
      tTypeDesc
        .getTypes.get(0)
        .getPrimitiveEntry
        .getTypeQualifiers
        .getQualifiers
        .get(TCLIServiceConstants.PRECISION).getI32Value === 10)

    val typeDescriptor2 = new TypeDescriptor(ByteType)
    val tTypeDesc2 = typeDescriptor2.toTTypeDesc
    assert(tTypeDesc2.getTypesSize === 1)
    assert(tTypeDesc2.getTypes.get(0).getPrimitiveEntry.getTypeQualifiers === null)
    assert(tTypeDesc2.getTypes.get(0).getPrimitiveEntry.getType === TTypeId.TINYINT_TYPE)

    assert(ReflectUtils.getFieldValue(typeDescriptor, "typeQualifiers")
      .asInstanceOf[Option[TypeDescriptor]].isDefined)
    assert(ReflectUtils.getFieldValue(typeDescriptor2, "typeQualifiers")
      .asInstanceOf[Option[TypeDescriptor]].isEmpty)

    val e = intercept[IllegalArgumentException](TypeDescriptor(null).toTTypeDesc)
    assert(e.getMessage === "Unrecognized type name: null")

    val e2 = intercept[IllegalArgumentException](TypeDescriptor(new MyDataType).toTTypeDesc)
    assert(e2.getMessage === "Unrecognized type name: mydata")
  }
}
Example 11
Source File: DecimalExpressionSuite.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{Decimal, DecimalType, LongType}

class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("UnscaledValue") {
    val d1 = Decimal("10.1")
    checkEvaluation(UnscaledValue(Literal(d1)), 101L)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(UnscaledValue(Literal(d2)), 101L)
    checkEvaluation(UnscaledValue(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("MakeDecimal") {
    checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1"))
    checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null)
  }

  test("PromotePrecision") {
    val d1 = Decimal("10.1")
    checkEvaluation(PromotePrecision(Literal(d1)), d1)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(PromotePrecision(Literal(d2)), d2)
    checkEvaluation(PromotePrecision(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("CheckOverflow") {
    val d1 = Decimal("10.1")
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 1)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 2)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 3)), null)

    val d2 = Decimal(101, 3, 1)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 1)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 2)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 3)), null)

    checkEvaluation(CheckOverflow(Literal.create(null, DecimalType(2, 1)), DecimalType(3, 2)), null)
  }
}
Example 12
Source File: TypeDescriptor.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi.schema

import org.apache.hive.service.cli.thrift.{TPrimitiveTypeEntry, TTypeDesc, TTypeEntry}
import org.apache.spark.sql.types.{DataType, DecimalType}

case class TypeDescriptor(typ: DataType) {

  private val typeQualifiers: Option[TypeQualifiers] = typ match {
    case d: DecimalType => Some(TypeQualifiers.fromTypeInfo(d))
    case _ => None
  }

  def toTTypeDesc: TTypeDesc = {
    val primitiveEntry = new TPrimitiveTypeEntry(SchemaMapper.toTTypeId(typ))
    typeQualifiers.map(_.toTTypeQualifiers).foreach(primitiveEntry.setTypeQualifiers)
    val entry = TTypeEntry.primitiveEntry(primitiveEntry)
    val desc = new TTypeDesc
    desc.addToTypes(entry)
    desc
  }
}
Example 13
Source File: TypeQualifiers.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi.schema

import scala.collection.JavaConverters._

import org.apache.hive.service.cli.thrift.{TCLIServiceConstants, TTypeQualifiers, TTypeQualifierValue}
import org.apache.spark.sql.types.{DataType, DecimalType}

class TypeQualifiers private() {
  private var precision: Option[Int] = None
  private var scale: Option[Int] = None

  private def setPrecision(precision: Int): Unit = {
    this.precision = Some(precision)
  }

  private def setScale(scale: Int): Unit = {
    this.scale = Some(scale)
  }

  def toTTypeQualifiers: TTypeQualifiers = new TTypeQualifiers(
    (precision.map(TTypeQualifierValue.i32Value).map(TCLIServiceConstants.PRECISION -> _) ++
      scale.map(TTypeQualifierValue.i32Value).map(TCLIServiceConstants.SCALE -> _)).toMap.asJava)
}

object TypeQualifiers {
  def fromTypeInfo(typ: DataType): TypeQualifiers = {
    val result = new TypeQualifiers
    typ match {
      case decimalType: DecimalType =>
        result.setScale(decimalType.scale)
        result.setPrecision(decimalType.precision)
      case _ =>
    }
    result
  }
}
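A short usage sketch of the class above, mirroring what TypeQualifiersSuite in Example 1 verifies: decimal types carry precision and scale qualifiers, while every other type produces an empty qualifier map. It assumes the code runs alongside the yaooqinn.kyuubi.schema package shown here.

import org.apache.spark.sql.types.{DecimalType, StringType}

val decimalQualifiers = TypeQualifiers.fromTypeInfo(DecimalType(10, 2)).toTTypeQualifiers
// decimalQualifiers.getQualifiers now maps PRECISION -> 10 and SCALE -> 2

val stringQualifiers = TypeQualifiers.fromTypeInfo(StringType).toTTypeQualifiers
assert(stringQualifiers.getQualifiers.isEmpty)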
Example 14
Source File: DecimalExpressionSuite.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{Decimal, DecimalType, LongType}

class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("UnscaledValue") {
    val d1 = Decimal("10.1")
    checkEvaluation(UnscaledValue(Literal(d1)), 101L)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(UnscaledValue(Literal(d2)), 101L)
    checkEvaluation(UnscaledValue(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("MakeDecimal") {
    checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1"))
    checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null)
  }

  test("PromotePrecision") {
    val d1 = Decimal("10.1")
    checkEvaluation(PromotePrecision(Literal(d1)), d1)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(PromotePrecision(Literal(d2)), d2)
    checkEvaluation(PromotePrecision(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("CheckOverflow") {
    val d1 = Decimal("10.1")
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 1)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 2)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 3)), null)

    val d2 = Decimal(101, 3, 1)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 1)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 2)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 3)), null)

    checkEvaluation(CheckOverflow(Literal.create(null, DecimalType(2, 1)), DecimalType(3, 2)), null)
  }
}
Example 15
Source File: DruidPlannerHelper.scala From spark-druid-olap with Apache License 2.0
package org.apache.spark.sql.sources.druid

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LogicalPlan}
import org.apache.spark.sql.types.{DecimalType, _}
import org.sparklinedata.druid.DruidOperatorAttribute

trait DruidPlannerHelper {

  def unalias(e: Expression, agg: Aggregate): Option[Expression] = {
    agg.aggregateExpressions.find { aE =>
      (aE, e) match {
        case _ if aE == e => true
        case (_, e: AttributeReference) if e.exprId == aE.exprId => true
        case (Alias(child, _), e) if child == e => true
        case _ => false
      }
    }.map {
      case Alias(child, _) => child
      case x => x
    }
  }

  def findAttribute(e: Expression): Option[AttributeReference] = {
    e.find(_.isInstanceOf[AttributeReference]).map(_.asInstanceOf[AttributeReference])
  }

  def positionOfAttribute(e: Expression,
      plan: LogicalPlan): Option[(Expression, (AttributeReference, Int))] = {
    for (aR <- findAttribute(e);
         attr <- plan.output.zipWithIndex.find(t => t._1.exprId == aR.exprId))
      yield (e, (aR, attr._2))
  }

  def exprIdToAttribute(e: Expression, plan: LogicalPlan): Option[(ExprId, Int)] = {
    for (aR <- findAttribute(e);
         attr <- plan.output.zipWithIndex.find(t => t._1.exprId == aR.exprId))
      yield (aR.exprId, attr._2)
  }

  case class GroupingInfo(gEs: Seq[Expression],
      expandOpGExps: Seq[Expression],
      aEs: Seq[NamedExpression],
      expandOpProjection: Seq[Expression],
      aEExprIdToPos: Map[ExprId, Int],
      aEToLiteralExpr: Map[Expression, Expression] = Map())

  def isNumericType(dt: DataType): Boolean = NumericType.acceptsType(dt)
}
Example 16
Source File: SqlAstBuilderHelper.scala From carbondata with Apache License 2.0
package org.apache.spark.sql.hive

import org.apache.spark.sql.catalyst.parser.ParserUtils.{string, withOrigin}
import org.apache.spark.sql.catalyst.parser.SqlBaseParser
import org.apache.spark.sql.catalyst.parser.SqlBaseParser.{AddTableColumnsContext, ChangeColumnContext, CreateTableContext, ShowTablesContext}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.CarbonParserUtil
import org.apache.spark.sql.execution.SparkSqlAstBuilder
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsModel, AlterTableDataTypeChangeModel}
import org.apache.spark.sql.execution.command.schema.{CarbonAlterTableAddColumnCommand, CarbonAlterTableColRenameDataTypeChangeCommand}
import org.apache.spark.sql.execution.command.table.{CarbonExplainCommand, CarbonShowTablesCommand}
import org.apache.spark.sql.parser.CarbonSpark2SqlParser
import org.apache.spark.sql.types.DecimalType

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties

trait SqlAstBuilderHelper extends SparkSqlAstBuilder {

  override def visitChangeColumn(ctx: ChangeColumnContext): LogicalPlan = {
    val newColumn = visitColType(ctx.colType)
    val isColumnRename = if (!ctx.identifier.getText.equalsIgnoreCase(newColumn.name)) {
      true
    } else {
      false
    }

    val (typeString, values): (String, Option[List[(Int, Int)]]) = newColumn.dataType match {
      case d: DecimalType => ("decimal", Some(List((d.precision, d.scale))))
      case _ => (newColumn.dataType.typeName.toLowerCase, None)
    }

    val alterTableColRenameAndDataTypeChangeModel = AlterTableDataTypeChangeModel(
      CarbonParserUtil.parseDataType(typeString, values, isColumnRename),
      CarbonParserUtil.convertDbNameToLowerCase(Option(ctx.tableIdentifier().db).map(_.getText)),
      ctx.tableIdentifier().table.getText.toLowerCase,
      ctx.identifier.getText.toLowerCase,
      newColumn.name.toLowerCase,
      isColumnRename)

    CarbonAlterTableColRenameDataTypeChangeCommand(alterTableColRenameAndDataTypeChangeModel)
  }

  def visitAddTableColumns(parser: CarbonSpark2SqlParser,
      ctx: AddTableColumnsContext): LogicalPlan = {
    val cols = Option(ctx.columns).toSeq.flatMap(visitColTypeList)
    val fields = parser.getFields(cols)
    val tblProperties = scala.collection.mutable.Map.empty[String, String]
    val tableModel = CarbonParserUtil.prepareTableModel(false,
      CarbonParserUtil.convertDbNameToLowerCase(Option(ctx.tableIdentifier().db).map(_.getText)),
      ctx.tableIdentifier.table.getText.toLowerCase,
      fields,
      Seq.empty,
      tblProperties,
      None,
      true)

    val alterTableAddColumnsModel = AlterTableAddColumnsModel(
      Option(ctx.tableIdentifier().db).map(_.getText),
      ctx.tableIdentifier.table.getText,
      tblProperties.toMap,
      tableModel.dimCols,
      tableModel.msrCols,
      tableModel.highcardinalitydims.getOrElse(Seq.empty))

    CarbonAlterTableAddColumnCommand(alterTableAddColumnsModel)
  }

  override def visitCreateTable(ctx: CreateTableContext): LogicalPlan = {
    super.visitCreateTable(ctx)
  }

  override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = {
    withOrigin(ctx) {
      CarbonShowTablesCommand(
        Option(ctx.db).map(_.getText),
        Option(ctx.pattern).map(string))
    }
  }

  override def visitExplain(ctx: SqlBaseParser.ExplainContext): LogicalPlan = {
    CarbonExplainCommand(super.visitExplain(ctx))
  }
}
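The DecimalType branch in visitChangeColumn above is the DecimalType-specific piece: decimals keep their (precision, scale) pair, while every other type is reduced to its lower-cased type name. A standalone sketch of just that match, using only Spark's public type API (the helper name toTypeSpec is invented for illustration):

import org.apache.spark.sql.types.{DataType, DecimalType, IntegerType}

// Decimals carry (precision, scale); other types only carry their type name.
def toTypeSpec(dataType: DataType): (String, Option[List[(Int, Int)]]) = dataType match {
  case d: DecimalType => ("decimal", Some(List((d.precision, d.scale))))
  case other          => (other.typeName.toLowerCase, None)
}

assert(toTypeSpec(DecimalType(18, 4)) == ("decimal", Some(List((18, 4)))))
assert(toTypeSpec(IntegerType) == ("integer", None))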
Example 17
Source File: DecimalExpressionSuite.scala From sparkoscope with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{Decimal, DecimalType, LongType}

class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("UnscaledValue") {
    val d1 = Decimal("10.1")
    checkEvaluation(UnscaledValue(Literal(d1)), 101L)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(UnscaledValue(Literal(d2)), 101L)
    checkEvaluation(UnscaledValue(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("MakeDecimal") {
    checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1"))
    checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null)
  }

  test("PromotePrecision") {
    val d1 = Decimal("10.1")
    checkEvaluation(PromotePrecision(Literal(d1)), d1)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(PromotePrecision(Literal(d2)), d2)
    checkEvaluation(PromotePrecision(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("CheckOverflow") {
    val d1 = Decimal("10.1")
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 1)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 2)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 3)), null)

    val d2 = Decimal(101, 3, 1)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 1)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 2)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 3)), null)

    checkEvaluation(CheckOverflow(Literal.create(null, DecimalType(2, 1)), DecimalType(3, 2)), null)
  }
}
Example 18
Source File: DataTypeMapping.scala From azure-kusto-spark with Apache License 2.0
package com.microsoft.kusto.spark.utils

import org.apache.spark.sql.types.DataTypes._
import org.apache.spark.sql.types.{ArrayType, DataType, DataTypes, DecimalType, MapType, StructType}

object DataTypeMapping {

  val kustoTypeToSparkTypeMap: Map[String, DataType] = Map(
    "string" -> StringType,
    "long" -> LongType,
    "datetime" -> TimestampType, // Kusto datetime is equivalent to TimestampType
    "timespan" -> StringType,
    "bool" -> BooleanType,
    "real" -> DoubleType,
    // Can be partitioned differently between precision and scale, total must be 34 to match .Net SqlDecimal
    "decimal" -> DataTypes.createDecimalType(20, 14),
    "guid" -> StringType,
    "int" -> IntegerType,
    "dynamic" -> StringType
  )

  val kustoJavaTypeToSparkTypeMap: Map[String, DataType] = Map(
    "string" -> StringType,
    "int64" -> LongType,
    "datetime" -> TimestampType,
    "timespan" -> StringType,
    "sbyte" -> BooleanType,
    "double" -> DoubleType,
    "sqldecimal" -> DataTypes.createDecimalType(20, 14),
    "guid" -> StringType,
    "int32" -> IntegerType,
    "object" -> StringType
  )

  val sparkTypeToKustoTypeMap: Map[DataType, String] = Map(
    StringType -> "string",
    BooleanType -> "bool",
    DateType -> "datetime",
    TimestampType -> "datetime",
    DataTypes.createDecimalType() -> "decimal",
    DoubleType -> "real",
    FloatType -> "real",
    ByteType -> "int",
    IntegerType -> "int",
    LongType -> "long",
    ShortType -> "int"
  )

  def getSparkTypeToKustoTypeMap(fieldType: DataType): String = {
    if (fieldType.isInstanceOf[DecimalType]) {
      "decimal"
    } else if (fieldType.isInstanceOf[ArrayType] || fieldType.isInstanceOf[StructType] ||
        fieldType.isInstanceOf[MapType]) {
      "dynamic"
    } else {
      DataTypeMapping.sparkTypeToKustoTypeMap.getOrElse(fieldType, "string")
    }
  }
}
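A short usage sketch of getSparkTypeToKustoTypeMap above: any DecimalType maps to Kusto's decimal regardless of precision and scale, complex types map to dynamic, and everything else falls back through sparkTypeToKustoTypeMap (or to string). It assumes the DataTypeMapping object shown above is on the classpath.

import org.apache.spark.sql.types.{ArrayType, DecimalType, StringType}
import com.microsoft.kusto.spark.utils.DataTypeMapping

println(DataTypeMapping.getSparkTypeToKustoTypeMap(DecimalType(20, 14)))   // decimal
println(DataTypeMapping.getSparkTypeToKustoTypeMap(ArrayType(StringType))) // dynamic
println(DataTypeMapping.getSparkTypeToKustoTypeMap(StringType))            // string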
Example 19
Source File: DecimalExpressionSuite.scala From XSQL with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{Decimal, DecimalType, LongType}

class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("UnscaledValue") {
    val d1 = Decimal("10.1")
    checkEvaluation(UnscaledValue(Literal(d1)), 101L)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(UnscaledValue(Literal(d2)), 101L)
    checkEvaluation(UnscaledValue(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("MakeDecimal") {
    checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1"))
    checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null)
  }

  test("PromotePrecision") {
    val d1 = Decimal("10.1")
    checkEvaluation(PromotePrecision(Literal(d1)), d1)
    val d2 = Decimal(101, 3, 1)
    checkEvaluation(PromotePrecision(Literal(d2)), d2)
    checkEvaluation(PromotePrecision(Literal.create(null, DecimalType(2, 1))), null)
  }

  test("CheckOverflow") {
    val d1 = Decimal("10.1")
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 1)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 2)), d1)
    checkEvaluation(CheckOverflow(Literal(d1), DecimalType(4, 3)), null)

    val d2 = Decimal(101, 3, 1)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 0)), Decimal("10"))
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 1)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 2)), d2)
    checkEvaluation(CheckOverflow(Literal(d2), DecimalType(4, 3)), null)

    checkEvaluation(CheckOverflow(Literal.create(null, DecimalType(2, 1)), DecimalType(3, 2)), null)
  }
}