org.apache.spark.sql.internal.StaticSQLConf Scala Examples
The following examples show how to use org.apache.spark.sql.internal.StaticSQLConf.
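StaticSQLConf collects Spark's static SQL configuration entries: settings such as the catalog implementation, session extensions, and warehouse location that must be fixed before a SparkSession is created (attempting to modify them on a live session raises an error). Below is a minimal sketch of the basic pattern; the object name, master, and warehouse path are made up for illustration and are not taken from any of the projects on this page. Note that StaticSQLConf lives in Spark's internal namespace, which is why this sketch, like every example below, sits inside an org.apache.spark package:

package org.apache.spark.sql.demo

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.StaticSQLConf

object StaticSQLConfSketch {
  def main(args: Array[String]): Unit = {
    // Static SQL confs must be supplied at build time; trying to change them
    // on an already-running session fails.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("static-sql-conf-sketch")
      .config(StaticSQLConf.CATALOG_IMPLEMENTATION.key, "in-memory")
      .config(StaticSQLConf.WAREHOUSE_PATH.key, "/tmp/static-conf-warehouse")
      .getOrCreate()

    // Static entries can still be read back at runtime.
    println(spark.conf.get(StaticSQLConf.CATALOG_IMPLEMENTATION.key))
    spark.stop()
  }
}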
Example 1
Source File: SharedSparkSessionBase.scala from spark-alchemy (Apache License 2.0)
package org.apache.spark.sql.test

import org.apache.spark.sql.internal.StaticSQLConf
import org.apache.spark.sql.{SQLContext, SparkSession}
import org.apache.spark.{DebugFilesystem, SparkConf}
import org.scalatest.Suite
import org.scalatest.concurrent.Eventually

import scala.concurrent.duration._

  protected override def afterAll(): Unit = {
    try {
      super.afterAll()
    } finally {
      try {
        if (_spark != null) {
          try {
            _spark.sessionState.catalog.reset()
          } finally {
            try {
              waitForTasksToFinish()
            } finally {
              _spark.stop()
              _spark = null
            }
          }
        }
      } finally {
        SparkSession.clearActiveSession()
        SparkSession.clearDefaultSession()
      }
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    // Clear all persistent datasets after each test
    spark.sharedState.cacheManager.clearCache()
    // files can be closed from other threads, so wait a bit
    // normally this doesn't take more than 1s
    eventually(timeout(30.seconds), interval(2.seconds)) {
      DebugFilesystem.assertNoOpenStreams()
    }
  }
}
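The excerpt shows only the lifecycle hooks; the trait declaration and the members that actually consume the StaticSQLConf import (the SparkConf used to build _spark) are elided. The following is a hypothetical, stripped-down version of the same shared-session pattern, with made-up names and values rather than spark-alchemy's actual configuration:

package org.apache.spark.sql.demo

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.StaticSQLConf

// Hypothetical harness, not spark-alchemy's code: one shared session per suite,
// built from a SparkConf carrying the static SQL settings, and torn down the
// same way the excerpt's afterAll does (stop, then clear active/default session).
object SharedSessionSketch {
  private var _spark: SparkSession = _

  def start(): SparkSession = {
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("shared-session-sketch")
      .set(StaticSQLConf.WAREHOUSE_PATH.key, "/tmp/shared-session-warehouse")
    _spark = SparkSession.builder().config(conf).getOrCreate()
    _spark
  }

  def stop(): Unit = {
    if (_spark != null) {
      try {
        _spark.stop()
      } finally {
        _spark = null
        SparkSession.clearActiveSession()
        SparkSession.clearDefaultSession()
      }
    }
  }
}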
Example 2
Source File: SharedSparkContext.scala from tispark (Apache License 2.0)
package org.apache.spark

import org.apache.spark.SharedSparkContext._
import org.apache.spark.sql.internal.StaticSQLConf
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait SharedSparkContext extends BeforeAndAfterAll with BeforeAndAfterEach {
  self: Suite =>

  protected var _isHiveEnabled: Boolean = false
  protected var conf: SparkConf = new SparkConf(false)

  def sc: SparkContext = _sc

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    if (_sc != null) {
      SharedSparkContext.stop()
    }
    initializeContext()
  }

  protected def initializeContext(): Unit = synchronized {
    if (null == _sc) {
      conf.set("spark.sql.test.key", "true")
      if (_isHiveEnabled) {
        // The catalog implementation is static, so it must be chosen before
        // the shared SparkContext is created.
        conf.set(StaticSQLConf.CATALOG_IMPLEMENTATION, "hive")
      }
      _sc = new SparkContext("local[4]", "tispark-integration-test", conf)
    }
  }

  override protected def afterAll(): Unit = {
    try {
      SharedSparkContext.stop()
    } finally {
      super.afterAll()
    }
  }
}

object SharedSparkContext {
  @transient private var _sc: SparkContext = _

  def stop(): Unit = synchronized {
    if (_sc != null) {
      _sc.stop()
      _sc = null
    }
    // To avoid RPC rebinding to the same port, since it doesn't unbind immediately on shutdown
    System.clearProperty("spark.driver.port")
  }
}
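A hypothetical suite (the class name, ScalaTest 3.1+ AnyFunSuite style, and the assertion are illustrative, not from tispark) showing how the trait is meant to be used: flipping _isHiveEnabled in the class body, before beforeAll runs, makes initializeContext apply StaticSQLConf.CATALOG_IMPLEMENTATION to the shared context:

package org.apache.spark

import org.scalatest.funsuite.AnyFunSuite

// Hypothetical usage of the SharedSparkContext trait above.
class HiveBackedSuite extends AnyFunSuite with SharedSparkContext {
  // Must be set before beforeAll creates the SparkContext.
  _isHiveEnabled = true

  test("shared context carries the hive catalog setting") {
    assert(sc.getConf.get("spark.sql.catalogImplementation") == "hive")
  }
}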
Example 3
Source File: DeltaHiveTest.scala from delta (Apache License 2.0)
package org.apache.spark.sql.delta.test

import org.apache.spark.sql.delta.catalog.DeltaCatalog
import io.delta.sql.DeltaSparkSessionExtension
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkContext, SparkFunSuite}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.hive.test.{TestHive, TestHiveContext}
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
import org.apache.spark.sql.test.SQLTestUtils

trait DeltaHiveTest extends SparkFunSuite with BeforeAndAfterAll { self: SQLTestUtils =>

  private var _session: SparkSession = _
  private var _hiveContext: TestHiveContext = _
  private var _sc: SparkContext = _

  override def beforeAll(): Unit = {
    val conf = TestHive.sparkSession.sparkContext.getConf.clone()
    TestHive.sparkSession.stop()
    // Register Delta's catalog and session extension on the cloned conf before
    // the new context is created; spark.sql.extensions is a static SQL conf.
    conf.set(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION.key, classOf[DeltaCatalog].getName)
    conf.set(StaticSQLConf.SPARK_SESSION_EXTENSIONS.key,
      classOf[DeltaSparkSessionExtension].getName)
    _sc = new SparkContext("local", this.getClass.getName, conf)
    _hiveContext = new TestHiveContext(_sc)
    _session = _hiveContext.sparkSession
    SparkSession.setActiveSession(_session)
    super.beforeAll()
  }

  override protected def spark: SparkSession = _session

  override def afterAll(): Unit = {
    try {
      _hiveContext.reset()
    } finally {
      _sc.stop()
    }
  }
}
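A hypothetical suite (name and SQL are illustrative, not from the delta project) showing how the trait is meant to be mixed in: the self-type requires SQLTestUtils, and the session built in beforeAll already has DeltaCatalog and DeltaSparkSessionExtension registered through the two confs set above:

package org.apache.spark.sql.delta.test

import org.apache.spark.sql.test.SQLTestUtils

// Hypothetical usage of the DeltaHiveTest trait above.
class DeltaHiveUsageSuite extends DeltaHiveTest with SQLTestUtils {
  test("create a Delta table against the Hive-backed session") {
    withTable("events") {
      spark.sql("CREATE TABLE events (id BIGINT) USING delta")
      assert(spark.table("events").columns.contains("id"))
    }
  }
}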
Example 4
Source File: SparkSessionUtils.scala from mist (Apache License 2.0)
package org.apache.spark

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}

object SparkSessionUtils {

  def getOrCreate(sc: SparkContext, withHiveSupport: Boolean): SparkSession = {
    val builder = SparkSession
      .builder()
      .sparkContext(sc)
      .config(sc.conf)

    if (withHiveSupport) {
      // The catalog implementation is a static SQL conf, so it has to be set
      // on the underlying SparkConf before the session is created.
      sc.conf.set(StaticSQLConf.CATALOG_IMPLEMENTATION.key, "hive")
      builder.enableHiveSupport().getOrCreate()
    } else builder.getOrCreate()
  }
}
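A hypothetical caller (app name, master, and the printed key are illustrative): reuse an existing SparkContext and let the helper decide whether to switch the catalog implementation to Hive before the session is built:

package org.apache.spark

import org.apache.spark.sql.internal.StaticSQLConf

// Hypothetical demo of the SparkSessionUtils helper above.
object SparkSessionUtilsDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[2]", "session-utils-demo", new SparkConf())
    // withHiveSupport = false keeps the default in-memory catalog.
    val spark = SparkSessionUtils.getOrCreate(sc, withHiveSupport = false)
    println(spark.conf.get(StaticSQLConf.CATALOG_IMPLEMENTATION.key))
    spark.stop()
  }
}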