org.apache.spark.sql.hive.test.TestHiveContext Scala Examples
The following examples show how to use org.apache.spark.sql.hive.test.TestHiveContext.
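Before the project-by-project listings, here is a minimal sketch of the pattern they all share: build a local SparkContext, wrap it in a TestHiveContext (which provisions a throwaway Hive metastore and warehouse for the test run), and issue SQL through the underlying session. This is a distilled illustration based on the Spark 2.x examples below, not code taken from any of the listed projects.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.test.TestHiveContext

object TestHiveContextSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().set("spark.ui.enabled", "false")
    val sc = new SparkContext("local", "TestSQLContext", conf)

    // Spins up an in-process Hive metastore and a temporary warehouse
    // directory that live only for the duration of the test.
    val hive = new TestHiveContext(sc)

    hive.sparkSession.sql("SHOW TABLES").collect().foreach(println)

    hive.reset() // drop test tables and restore a clean catalog
    sc.stop()
  }
}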
Example 1
Source File: ConcurrentHiveSuite.scala From drizzle-spark with Apache License 2.0 (identical code also appears in the XSQL, sparkoscope, multi-tenancy-spark, and Spark-2.3.1 projects)
package org.apache.spark.sql.hive.execution

import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.sql.hive.test.TestHiveContext

class ConcurrentHiveSuite extends SparkFunSuite with BeforeAndAfterAll {
  // The ignore(...) wrapper disables the nested test: running multiple
  // TestHiveContext instances in one JVM is not supported.
  ignore("multiple instances not supported") {
    test("Multiple Hive Instances") {
      (1 to 10).map { i =>
        val conf = new SparkConf()
        conf.set("spark.ui.enabled", "false")
        val ts =
          new TestHiveContext(new SparkContext("local", s"TestSQLContext$i", conf))
        ts.sparkSession.sql("SHOW TABLES").collect()
        ts.sparkSession.sql("SELECT * FROM src").collect()
        ts.sparkSession.sql("SHOW TABLES").collect()
      }
    }
  }
}
Example 2
Source File: DeltaHiveTest.scala From delta with Apache License 2.0
package org.apache.spark.sql.delta.test

import org.apache.spark.sql.delta.catalog.DeltaCatalog
import io.delta.sql.DeltaSparkSessionExtension
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkContext, SparkFunSuite}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.hive.test.{TestHive, TestHiveContext}
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
import org.apache.spark.sql.test.SQLTestUtils

trait DeltaHiveTest extends SparkFunSuite with BeforeAndAfterAll { self: SQLTestUtils =>

  private var _session: SparkSession = _
  private var _hiveContext: TestHiveContext = _
  private var _sc: SparkContext = _

  override def beforeAll(): Unit = {
    // Reuse the configuration of the shared TestHive session, then stop it so
    // a fresh context can be built with the Delta catalog and extensions set.
    val conf = TestHive.sparkSession.sparkContext.getConf.clone()
    TestHive.sparkSession.stop()
    conf.set(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION.key, classOf[DeltaCatalog].getName)
    conf.set(StaticSQLConf.SPARK_SESSION_EXTENSIONS.key,
      classOf[DeltaSparkSessionExtension].getName)
    _sc = new SparkContext("local", this.getClass.getName, conf)
    _hiveContext = new TestHiveContext(_sc)
    _session = _hiveContext.sparkSession
    SparkSession.setActiveSession(_session)
    super.beforeAll()
  }

  override protected def spark: SparkSession = _session

  override def afterAll(): Unit = {
    try {
      _hiveContext.reset()
    } finally {
      _sc.stop()
    }
  }
}
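A hypothetical consuming suite (the suite name and assertion are illustrative, not part of the delta sources) mixes in SQLTestUtils to satisfy the trait's self-type and then uses the spark session it provides:

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.delta.catalog.DeltaCatalog
import org.apache.spark.sql.delta.test.DeltaHiveTest
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SQLTestUtils

// Illustrative only: this suite is not part of the delta repository.
class DeltaHiveExampleSuite extends QueryTest with SQLTestUtils with DeltaHiveTest {
  test("session has the Delta catalog installed") {
    // beforeAll() in DeltaHiveTest set this key before building the context.
    val catalogImpl = spark.conf.get(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION.key)
    assert(catalogImpl == classOf[DeltaCatalog].getName)
  }
}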
Example 3
Source File: ConcurrentHiveSuite.scala From iolap with Apache License 2.0 (near-identical code, differing only by an inline comment, appears in the spark1.52 project)
package org.apache.spark.sql.hive.execution

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.sql.hive.test.TestHiveContext
import org.scalatest.BeforeAndAfterAll

class ConcurrentHiveSuite extends SparkFunSuite with BeforeAndAfterAll {
  ignore("multiple instances not supported") {
    test("Multiple Hive Instances") {
      (1 to 10).map { i =>
        val ts =
          new TestHiveContext(new SparkContext("local", s"TestSQLContext$i", new SparkConf()))
        ts.executeSql("SHOW TABLES").toRdd.collect()
        ts.executeSql("SELECT * FROM src").toRdd.collect()
        ts.executeSql("SHOW TABLES").toRdd.collect()
      }
    }
  }
}
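Note the older API surface in this Spark 1.x-era fork: executeSql returns a QueryExecution whose toRdd materializes the result as an RDD, whereas the Spark 2.x examples above reach the same tables through ts.sparkSession.sql(...).collect().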
Example 4
Source File: TestWithHiveContext.scala From aerosolve with Apache License 2.0
package com.airbnb.common.ml.util.testutil

import org.apache.spark.sql.hive.test.TestHiveContext
import org.junit.After
import org.junit.Before

trait TestWithHiveContext extends TestWithSparkContext {

  @transient private var _hc: Option[TestHiveContext] = None

  // Can't be called before `initHiveContext()`
  def hc: TestHiveContext = _hc.get

  @Before
  override def initContexts(): Unit = {
    initSparkContext()
    initHiveContext()
  }

  @After
  override def cleanupContexts(): Unit = {
    cleanupSparkContext()
    cleanupHiveContext()
  }

  protected def initHiveContext(): Unit = {
    _hc = Some(TestHiveContextProvider.createContext(sc))
  }

  protected def cleanupHiveContext(): Unit = {
    TestHiveContextProvider.stopContext(hc)
    _hc = None
  }
}
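The trait relies on a TestWithSparkContext base trait (providing sc, initSparkContext(), and cleanupSparkContext()) and a TestHiveContextProvider helper, neither of which is included in this excerpt. A minimal sketch of what such a provider could look like, assumed here rather than taken from the aerosolve sources:

import org.apache.spark.SparkContext
import org.apache.spark.sql.hive.test.TestHiveContext

// Assumed helper, not from the aerosolve sources: centralizes creation and
// teardown of the TestHiveContext used by TestWithHiveContext.
object TestHiveContextProvider {

  def createContext(sc: SparkContext): TestHiveContext =
    new TestHiveContext(sc)

  def stopContext(hc: TestHiveContext): Unit = {
    // Drop test tables and temporary state; the SparkContext itself is owned
    // by TestWithSparkContext and is stopped in cleanupSparkContext().
    hc.reset()
  }
}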
Example 5
Source File: ConcurrentHiveSuite.scala From BigDatalog with Apache License 2.0
package org.apache.spark.sql.hive.execution

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.sql.hive.test.TestHiveContext
import org.scalatest.BeforeAndAfterAll

class ConcurrentHiveSuite extends SparkFunSuite with BeforeAndAfterAll {
  ignore("multiple instances not supported") {
    test("Multiple Hive Instances") {
      (1 to 10).map { i =>
        val conf = new SparkConf()
        conf.set("spark.ui.enabled", "false")
        val ts =
          new TestHiveContext(new SparkContext("local", s"TestSQLContext$i", conf))
        ts.executeSql("SHOW TABLES").toRdd.collect()
        ts.executeSql("SELECT * FROM src").toRdd.collect()
        ts.executeSql("SHOW TABLES").toRdd.collect()
      }
    }
  }
}
Example 6
Source File: HiveTestTrait.scala From cloud-integration with Apache License 2.0
package org.apache.spark.sql.sources

import java.io.File

import com.cloudera.spark.cloud.ObjectStoreConfigurations
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.sql.{SparkSession, SQLContext, SQLImplicits}
import org.apache.spark.sql.hive.test.TestHiveContext
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.util.Utils

trait HiveTestTrait extends SparkFunSuite with BeforeAndAfterAll {
  // override protected val enableAutoThreadAudit = false
  protected var hiveContext: HiveInstanceForTests = _
  protected var spark: SparkSession = _

  protected override def beforeAll(): Unit = {
    super.beforeAll()
    // set up spark and hive context
    hiveContext = new HiveInstanceForTests()
    spark = hiveContext.sparkSession
  }

  protected override def afterAll(): Unit = {
    try {
      SparkSession.clearActiveSession()
      if (hiveContext != null) {
        hiveContext.reset()
        hiveContext = null
      }
      if (spark != null) {
        spark.close()
        spark = null
      }
    } finally {
      super.afterAll()
    }
  }
}

class HiveInstanceForTests
  extends TestHiveContext(
    new SparkContext(
      System.getProperty("spark.sql.test.master", "local[1]"),
      "TestSQLContext",
      new SparkConf()
        .setAll(ObjectStoreConfigurations.RW_TEST_OPTIONS)
        .set("spark.sql.warehouse.dir", TestSetup.makeWarehouseDir().toURI.getPath)
    )
  ) {
}

object TestSetup {

  def makeWarehouseDir(): File = {
    val warehouseDir = Utils.createTempDir(namePrefix = "warehouse")
    warehouseDir.delete()
    warehouseDir
  }
}
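A hypothetical consumer (the suite name and assertion are illustrative) simply extends the trait and queries through spark, which beforeAll() wires to the object-store-configured Hive context:

// Illustrative only: not part of the cloud-integration sources.
class WarehouseSmokeSuite extends HiveTestTrait {
  test("warehouse points at the temporary directory") {
    // TestSetup.makeWarehouseDir() names the directory with a "warehouse" prefix.
    assert(spark.conf.get("spark.sql.warehouse.dir").contains("warehouse"))
  }
}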