org.apache.spark.sql.catalyst.parser.ParseException Scala Examples
The following examples show how to use org.apache.spark.sql.catalyst.parser.ParseException.
You can vote up the examples you find useful or vote down those you don't,
and follow the links above each example to view the original project or source file.
Example 1
Source File: HiveExplainSuite.scala From drizzle-spark with Apache License 2.0 | 5 votes |
// NOTE(review): this scraped example is truncated — the class header (presumably
// `class HiveExplainSuite extends QueryTest with SQLTestUtils with TestHiveSingleton`,
// judging from the imports — TODO confirm against the original file) and the opening
// of the test method were lost during extraction. The orphan `}" )` below is scrape
// residue from a lost multi-line string argument; the fragment is NOT compilable as-is.
package org.apache.spark.sql.hive.execution

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.test.SQLTestUtils

}"
)
// EXPLAIN CODEGEN output must not include the physical plan section.
checkKeywordsNotExist(sql("EXPLAIN CODEGEN SELECT 1"),
  "== Physical Plan =="
)
// EXTENDED and CODEGEN cannot be combined in one EXPLAIN: the parser rejects it.
intercept[ParseException] {
  sql("EXPLAIN EXTENDED CODEGEN SELECT 1")
}
}
}
Example 2
Source File: JdbcUtilsSuite.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources.jdbc

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.types._

/**
 * Tests for `JdbcUtils.getCustomSchema`, which overlays user-supplied column type
 * overrides (the JDBC `customSchema` option string) onto a table's schema, using the
 * given resolver to match override names against existing column names.
 */
class JdbcUtilsSuite extends SparkFunSuite {

  // Two-column base schema that the customSchema overrides are resolved against.
  val tableSchema = StructType(Seq(
    StructField("C1", StringType, false),
    StructField("C2", IntegerType, false)))
  val caseSensitive = org.apache.spark.sql.catalyst.analysis.caseSensitiveResolution
  val caseInsensitive = org.apache.spark.sql.catalyst.analysis.caseInsensitiveResolution

  test("Parse user specified column types") {
    // Builds the expected two-column result schema for the happy-path cases.
    def expected(t1: DataType, t2: DataType): StructType =
      StructType(Seq(StructField("C1", t1, false), StructField("C2", t2, false)))

    // A null or empty customSchema string leaves the table schema untouched.
    assert(JdbcUtils.getCustomSchema(tableSchema, null, caseInsensitive) === tableSchema)
    assert(JdbcUtils.getCustomSchema(tableSchema, "", caseInsensitive) === tableSchema)

    // Case-insensitive resolution: "c1" matches column "C1", whose type is replaced.
    assert(JdbcUtils.getCustomSchema(tableSchema, "c1 DATE", caseInsensitive) ===
      expected(DateType, IntegerType))
    // Case-sensitive resolution: "c1" does not match "C1", so the schema is unchanged.
    assert(JdbcUtils.getCustomSchema(tableSchema, "c1 DATE", caseSensitive) ===
      expected(StringType, IntegerType))

    // Multiple overrides in one option string, under both resolution modes.
    assert(JdbcUtils.getCustomSchema(tableSchema, "c1 DATE, C2 STRING", caseInsensitive) ===
      expected(DateType, StringType))
    assert(JdbcUtils.getCustomSchema(tableSchema, "c1 DATE, C2 STRING", caseSensitive) ===
      expected(StringType, StringType))

    // Duplicate column names in the option are rejected with an AnalysisException.
    val duplicate = intercept[AnalysisException] {
      JdbcUtils.getCustomSchema(tableSchema, "c1 DATE, c1 STRING", caseInsensitive)
    }
    assert(duplicate.getMessage.contains(
      "Found duplicate column(s) in the customSchema option value"))

    // An unknown type name ("DATEE") fails to parse with a ParseException.
    val dataTypeNotSupported = intercept[ParseException] {
      JdbcUtils.getCustomSchema(tableSchema, "c3 DATEE, C2 STRING", caseInsensitive)
    }
    assert(dataTypeNotSupported.getMessage.contains("DataType datee is not supported"))

    // A malformed separator ('.' instead of ',') also fails to parse.
    val mismatchedInput = intercept[ParseException] {
      JdbcUtils.getCustomSchema(tableSchema, "c3 DATE. C2 STRING", caseInsensitive)
    }
    assert(mismatchedInput.getMessage.contains("mismatched input '.' expecting"))
  }
}
Example 3
Source File: JdbcUtilsSuite.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources.jdbc

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.types._

/**
 * Tests for `JdbcUtils.getCustomSchema`, which overlays user-supplied column type
 * overrides (the JDBC `customSchema` option string) onto a table's schema, using the
 * given resolver to match override names against existing column names.
 *
 * NOTE(review): this block is a byte-identical duplicate of the previous example
 * (same suite scraped from a different repository).
 */
class JdbcUtilsSuite extends SparkFunSuite {
  // Two-column base schema that the customSchema overrides are resolved against.
  val tableSchema = StructType(Seq(
    StructField("C1", StringType, false), StructField("C2", IntegerType, false)))
  val caseSensitive = org.apache.spark.sql.catalyst.analysis.caseSensitiveResolution
  val caseInsensitive = org.apache.spark.sql.catalyst.analysis.caseInsensitiveResolution

  test("Parse user specified column types") {
    // A null or empty customSchema string leaves the table schema untouched.
    assert(JdbcUtils.getCustomSchema(tableSchema, null, caseInsensitive) === tableSchema)
    assert(JdbcUtils.getCustomSchema(tableSchema, "", caseInsensitive) === tableSchema)
    // Case-insensitive resolution: "c1" matches column "C1", whose type is replaced.
    assert(JdbcUtils.getCustomSchema(tableSchema, "c1 DATE", caseInsensitive) ===
      StructType(Seq(StructField("C1", DateType, false), StructField("C2", IntegerType, false))))
    // Case-sensitive resolution: "c1" does not match "C1", so the schema is unchanged.
    assert(JdbcUtils.getCustomSchema(tableSchema, "c1 DATE", caseSensitive) ===
      StructType(Seq(StructField("C1", StringType, false), StructField("C2", IntegerType, false))))
    // Multiple overrides in one option string, under both resolution modes.
    assert(
      JdbcUtils.getCustomSchema(tableSchema, "c1 DATE, C2 STRING", caseInsensitive) ===
      StructType(Seq(StructField("C1", DateType, false), StructField("C2", StringType, false))))
    assert(JdbcUtils.getCustomSchema(tableSchema, "c1 DATE, C2 STRING", caseSensitive) ===
      StructType(Seq(StructField("C1", StringType, false), StructField("C2", StringType, false))))
    // Throw AnalysisException
    // Duplicate column names in the option are rejected; the `===` comparison below is
    // never evaluated because getCustomSchema throws first.
    val duplicate = intercept[AnalysisException]{
      JdbcUtils.getCustomSchema(tableSchema, "c1 DATE, c1 STRING", caseInsensitive) ===
        StructType(Seq(StructField("c1", DateType, false), StructField("c1", StringType, false)))
    }
    assert(duplicate.getMessage.contains(
      "Found duplicate column(s) in the customSchema option value"))
    // Throw ParseException
    // An unknown type name ("DATEE") fails to parse before any comparison happens.
    val dataTypeNotSupported = intercept[ParseException]{
      JdbcUtils.getCustomSchema(tableSchema, "c3 DATEE, C2 STRING", caseInsensitive) ===
        StructType(Seq(StructField("c3", DateType, false), StructField("C2", StringType, false)))
    }
    assert(dataTypeNotSupported.getMessage.contains("DataType datee is not supported"))
    // A malformed separator ('.' instead of ',') also fails to parse.
    val mismatchedInput = intercept[ParseException]{
      JdbcUtils.getCustomSchema(tableSchema, "c3 DATE. C2 STRING", caseInsensitive) ===
        StructType(Seq(StructField("c3", DateType, false), StructField("C2", StringType, false)))
    }
    assert(mismatchedInput.getMessage.contains("mismatched input '.' expecting"))
  }
}