org.apache.commons.lang3.exception.ExceptionUtils Scala Examples
The following examples show how to use org.apache.commons.lang3.exception.ExceptionUtils.
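Most of the examples below rely on just a few static helpers from ExceptionUtils: getStackTrace (renders an exception and all of its causes as a single String, e.g. for an error response body), getMessage (a short "ClassName: message" summary), getRootCause (the deepest exception in the cause chain), and getThrowableList (the whole cause chain as a java.util.List). The following minimal sketch is not taken from any of the projects below; the object name, exceptions, and printed values are illustrative only and assume a recent commons-lang3 version on the classpath:

import org.apache.commons.lang3.exception.ExceptionUtils

import scala.collection.JavaConverters._

object ExceptionUtilsSketch extends App {
  // Build a small cause chain: an IllegalStateException wrapping an IllegalArgumentException.
  val root    = new IllegalArgumentException("bad input")
  val wrapped = new IllegalStateException("operation failed", root)

  // Full stack trace of the exception and all of its causes, as one String.
  val stack: String = ExceptionUtils.getStackTrace(wrapped)

  // Short summary of the outermost exception: "IllegalStateException: operation failed".
  val summary: String = ExceptionUtils.getMessage(wrapped)

  // Deepest cause in the chain; guard with Option, since it can be null
  // when the throwable has no nested cause (behaviour differs slightly across lang3 versions).
  val rootCause: Option[Throwable] = Option(ExceptionUtils.getRootCause(wrapped))

  // Whole cause chain, outermost exception first.
  val chain: List[Throwable] = ExceptionUtils.getThrowableList(wrapped).asScala.toList

  println(summary)                             // IllegalStateException: operation failed
  println(rootCause.map(_.getMessage))         // Some(bad input)
  println(chain.map(_.getClass.getSimpleName)) // List(IllegalStateException, IllegalArgumentException)
  println(stack.takeWhile(_ != '\n'))          // java.lang.IllegalStateException: operation failed
}

In the examples that follow, getStackTrace is what the SparkSQLDriver variants and the MLeap controllers put into their error responses, while the Akka examples use getRootCause to report the underlying failure.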
Example 1
Source File: ErrorApi.scala From ohara with Apache License 2.0
package oharastream.ohara.client.configurator

import oharastream.ohara.client.HttpExecutor
import org.apache.commons.lang3.exception.ExceptionUtils
import spray.json.DefaultJsonProtocol._
import spray.json.RootJsonFormat

object ErrorApi {
  final case class Error(code: String, message: String, stack: String, apiUrl: Option[String]) extends HttpExecutor.Error

  def of(e: Throwable): Error = Error(
    code = e.getClass.getName,
    message = if (e.getMessage == null) "unknown" else e.getMessage,
    stack = ExceptionUtils.getStackTrace(e),
    apiUrl = None
  )

  implicit val ERROR_FORMAT: RootJsonFormat[Error] = jsonFormat4(Error)
}
Example 2
Source File: SparkSQLDriver.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import java.util.{ArrayList => JArrayList, Arrays, List => JList}

import scala.collection.JavaConverters._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, SQLContext}
import org.apache.spark.sql.execution.QueryExecution

private[hive] class SparkSQLDriver(val context: SQLContext = SparkSQLEnv.sqlContext)
  extends Driver
  with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.isEmpty) {
      new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, attr.dataType.catalogString, "")
      }
      new Schema(fieldSchemas.asJava, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      context.sparkContext.setJobDescription(command)
      val execution = context.sessionState.executePlan(context.sql(command).logicalPlan)
      hiveResponse = execution.hiveResultString()
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    } catch {
      case ae: AnalysisException =>
        logDebug(s"Failed in [$command]", ae)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
      case cause: Throwable =>
        logError(s"Failed in [$command]", cause)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
    }
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getResults(res: JList[_]): Boolean = {
    if (hiveResponse == null) {
      false
    } else {
      res.asInstanceOf[JArrayList[String]].addAll(hiveResponse.asJava)
      hiveResponse = null
      true
    }
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}
Example 3
Source File: AbstractFailoverOfflineTest.scala From spark-riak-connector with Apache License 2.0
package com.basho.riak.spark.rdd.failover

import com.basho.riak.client.core.query.Namespace
import com.basho.riak.client.core.util.HostAndPort
import com.basho.riak.stub.{RiakMessageHandler, RiakNodeStub}
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.hamcrest.{Description, Matchers}
import org.junit.internal.matchers.ThrowableCauseMatcher
import org.junit.{After, Before}

import scala.collection.JavaConversions._

abstract class AbstractFailoverOfflineTest extends Logging {

  protected final val NAMESPACE = new Namespace("default", "test-bucket")
  protected final val COVERAGE_ENTRIES_COUNT = 64

  protected var sc: SparkContext = _
  protected var riakNodes: Seq[(HostAndPort, RiakNodeStub)] = _ // tuple HostAndPort -> stub

  val riakHosts: Int = 1

  val riakMessageHandler: Option[RiakMessageHandler] = None

  def sparkConf: SparkConf = new SparkConf(false)
    .setMaster("local")
    .setAppName(getClass.getSimpleName)
    .set("spark.riak.connection.host", riakNodes.map { case (hp, _) => s"${hp.getHost}:${hp.getPort}" }.mkString(","))
    .set("spark.riak.output.wquorum", "1")
    .set("spark.riak.input.fetch-size", "2")

  def initRiakNodes(): Seq[(HostAndPort, RiakNodeStub)] = {
    require(riakMessageHandler.isDefined)

    // start riak stubs on localhost and free random port
    (1 to riakHosts).map { _ =>
      val riakNode = RiakNodeStub(riakMessageHandler.get)
      riakNode.start() -> riakNode
    }
  }

  @Before
  def setUp(): Unit = {
    riakNodes = initRiakNodes()
    sc = new SparkContext(sparkConf)
  }

  @After
  def tearDown(): Unit = {
    Option(riakNodes).foreach(_.foreach(n => n._2.stop()))
    Option(sc).foreach(_.stop())
  }

  def distributeEvenly(size: Int, splitCount: Int): Seq[Int] = {
    val (base, rem) = (size / splitCount, size % splitCount)
    (0 until splitCount).map(i => if (i < rem) base + 1 else base)
  }
}

class RootCauseMatcher[T <: Throwable](val excClass: Class[T])
  extends ThrowableCauseMatcher[T](Matchers.isA(excClass)) {

  private def getOneBeforeRootCause(item: T): Throwable = {
    val throwables = ExceptionUtils.getThrowableList(item)
    if (throwables.length > 1) {
      throwables.reverse.tail.head
    } else {
      throwables.head
    }
  }

  override def matchesSafely(item: T): Boolean =
    super.matchesSafely(getOneBeforeRootCause(item).asInstanceOf[T])

  override def describeMismatchSafely(item: T, description: Description): Unit =
    super.describeMismatchSafely(getOneBeforeRootCause(item).asInstanceOf[T], description)
}
Example 4
Source File: TweetExample.scala From akka_streams_tutorial with MIT License
package sample.stream

import java.time.{Instant, ZoneId}

import akka.NotUsed
import akka.actor.{ActorSystem, Cancellable}
import akka.stream.DelayOverflowStrategy
import akka.stream.scaladsl.{Flow, MergePrioritized, Sink, Source}
import org.apache.commons.lang3.exception.ExceptionUtils
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.duration._
import scala.util.{Failure, Success}

object TweetExample extends App {
  implicit val system = ActorSystem("TweetExample")
  implicit val ec = system.dispatcher
  val logger: Logger = LoggerFactory.getLogger(this.getClass)

  final case class Author(handle: String)

  final case class Hashtag(name: String)

  final case class Tweet(author: Author, timestamp: Long, body: String) {
    def hashtags: Set[Hashtag] =
      body.split(" ").collect { case t if t.startsWith("#") => Hashtag(t) }.toSet

    override def toString = {
      val localDateTime = Instant.ofEpochMilli(timestamp).atZone(ZoneId.systemDefault()).toLocalDateTime
      s"$localDateTime - ${author.handle} tweeted: ${body.take(5)}..."
    }
  }

  val akkaTag = Hashtag("#akka")

  val tweetsLowPrio: Source[Tweet, Cancellable] =
    Source.tick(1.second, 200.millis, NotUsed).map(_ => Tweet(Author("LowPrio"), System.currentTimeMillis, "#other #akka aBody"))
  val tweetsHighPrio: Source[Tweet, Cancellable] =
    Source.tick(2.second, 1.second, NotUsed).map(_ => Tweet(Author("HighPrio"), System.currentTimeMillis, "#akka #other aBody"))
  val tweetsVeryHighPrio: Source[Tweet, Cancellable] =
    Source.tick(2.second, 1.second, NotUsed).map(_ => Tweet(Author("VeryHighPrio"), System.currentTimeMillis, "#akka #other aBody"))

  val limitedTweets: Source[Tweet, NotUsed] =
    Source.combine(tweetsLowPrio, tweetsHighPrio, tweetsVeryHighPrio)(_ => MergePrioritized(List(1, 10, 100))).take(20)

  val processingFlow = Flow[Tweet]
    .filter(_.hashtags.contains(akkaTag))
    .wireTap(each => logger.info(s"$each"))

  val slowDownstream = Flow[Tweet]
    .delay(5.seconds, DelayOverflowStrategy.backpressure)

  val processedTweets = limitedTweets
    .via(processingFlow)
    .via(slowDownstream)
    .runWith(Sink.seq)

  processedTweets.onComplete {
    case Success(results) =>
      logger.info(s"Successfully processed: ${results.size} tweets")
      system.terminate()
    case Failure(exception) =>
      logger.info(s"The stream failed with: ${ExceptionUtils.getRootCause(exception)}")
      system.terminate()
  }
}
Example 5
Source File: WebsocketClientActor.scala From akka_streams_tutorial with MIT License
package alpakka.tcp_to_websockets.websockets

import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.http.scaladsl.model.StatusCode
import alpakka.tcp_to_websockets.websockets.WebsocketClientActor._
import org.apache.commons.lang3.exception.ExceptionUtils

import scala.concurrent.duration._

case class ConnectionException(cause: String) extends RuntimeException

object WebsocketClientActor {
  def props(id: String, endpoint: String, websocketConnectionStatusActor: ActorRef) =
    Props(new WebsocketClientActor(id, endpoint, websocketConnectionStatusActor))

  final case object Upgraded
  final case object Connected
  final case object Terminated
  final case class ConnectionFailure(ex: Throwable)
  final case class FailedUpgrade(statusCode: StatusCode)
  final case class SendMessage(msg: String)
}

class WebsocketClientActor(id: String, endpoint: String, websocketConnectionStatusActor: ActorRef)
  extends Actor with ActorLogging {
  implicit private val system = context.system
  implicit private val executionContext = system.dispatcher

  val webSocketClient = WebSocketClient(id, endpoint, self)

  override def receive: Receive = startup //initial state

  private def startup: Receive = {
    case Upgraded =>
      log.info(s"Client$id: WebSocket upgraded")
    case FailedUpgrade(statusCode) =>
      log.error(s"Client$id: failed to upgrade WebSocket connection: $statusCode")
      websocketConnectionStatusActor ! WebsocketConnectionStatusActor.Terminated
      throw ConnectionException(statusCode.toString())
    case ConnectionFailure(ex) =>
      log.error(s"Client $id: failed to establish WebSocket connection: $ex")
      websocketConnectionStatusActor ! WebsocketConnectionStatusActor.Terminated
      throw ConnectionException(ExceptionUtils.getRootCause(ex).getMessage)
    case Connected =>
      log.info(s"Client $id: WebSocket connected")
      websocketConnectionStatusActor ! WebsocketConnectionStatusActor.Connected
      context.become(running)
    case SendMessage(msg) =>
      log.warning(s"In state startup. Can not receive message: $msg. Resend after 2 seconds")
      system.scheduler.scheduleOnce(2.seconds, self, SendMessage(msg))
  }

  private def running: Receive = {
    case SendMessage(msg) =>
      log.info(s"About to send message to WebSocket: $msg")
      webSocketClient.sendToWebsocket(msg)
    case Terminated =>
      log.error(s"Client $id: WebSocket connection terminated")
      websocketConnectionStatusActor ! WebsocketConnectionStatusActor.Terminated
      throw ConnectionException(s"Client $id: WebSocket connection terminated")
    case ConnectionFailure(ex) =>
      log.error(s"Client $id: ConnectionFailure occurred: $ex")
      websocketConnectionStatusActor ! WebsocketConnectionStatusActor.Terminated
      throw ConnectionException(ExceptionUtils.getRootCause(ex).getMessage)
  }

  override def postStop(): Unit = {
    websocketConnectionStatusActor ! WebsocketConnectionStatusActor.Terminated
  }
}
Example 6
Source File: SparkSQLDriver.scala From XSQL with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import java.util.{ArrayList => JArrayList, Arrays, List => JList}

import scala.collection.JavaConverters._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, SQLContext}
import org.apache.spark.sql.execution.{QueryExecution, SQLExecution}

private[hive] class SparkSQLDriver(val context: SQLContext = SparkSQLEnv.sqlContext)
  extends Driver
  with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.isEmpty) {
      new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, attr.dataType.catalogString, "")
      }
      new Schema(fieldSchemas.asJava, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      context.sparkContext.setJobDescription(command)
      val execution = context.sessionState.executePlan(context.sql(command).logicalPlan)
      hiveResponse = SQLExecution.withNewExecutionId(context.sparkSession, execution) {
        execution.hiveResultString()
      }
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    } catch {
      case ae: AnalysisException =>
        logDebug(s"Failed in [$command]", ae)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
      case cause: Throwable =>
        logError(s"Failed in [$command]", cause)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
    }
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getResults(res: JList[_]): Boolean = {
    if (hiveResponse == null) {
      false
    } else {
      res.asInstanceOf[JArrayList[String]].addAll(hiveResponse.asJava)
      hiveResponse = null
      true
    }
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}
Example 7
Source File: ProtobufScoringController.scala From mleap with Apache License 2.0
package ml.combust.mleap.springboot

import java.util.concurrent.CompletionStage

import akka.actor.ActorSystem
import com.google.protobuf.ByteString
import ml.combust.mleap.executor._
import ml.combust.mleap.pb.TransformStatus.STATUS_ERROR
import ml.combust.mleap.pb.{BundleMeta, Mleap, Model, TransformFrameResponse}
import ml.combust.mleap.runtime.serialization.{FrameReader, FrameWriter}
import ml.combust.mleap.springboot.TypeConverters._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.HttpStatus
import org.springframework.web.bind.annotation._

import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
import scala.util.{Failure, Success}

@RestController
@RequestMapping
class ProtobufScoringController(@Autowired val actorSystem : ActorSystem,
                                @Autowired val mleapExecutor: MleapExecutor) {

  private val executor = actorSystem.dispatcher

  @PostMapping(path = Array("/models"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  @ResponseStatus(HttpStatus.ACCEPTED)
  def loadModel(@RequestBody request: Mleap.LoadModelRequest,
                @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int)
  : CompletionStage[Mleap.Model] = {
    mleapExecutor
      .loadModel(javaPbToExecutorLoadModelRequest(request))(timeout)
      .map(model => Model.toJavaProto(model))(executor).toJava
  }

  @DeleteMapping(path = Array("/models/{model_name}"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def unloadModel(@PathVariable("model_name") modelName: String,
                  @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int): CompletionStage[Mleap.Model] =
    mleapExecutor
      .unloadModel(UnloadModelRequest(modelName))(timeout)
      .map(model => Model.toJavaProto(model))(executor).toJava

  @GetMapping(path = Array("/models/{model_name}"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def getModel(@PathVariable("model_name") modelName: String,
               @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int): CompletionStage[Mleap.Model] =
    mleapExecutor
      .getModel(GetModelRequest(modelName))(timeout)
      .map(model => Model.toJavaProto(model))(executor).toJava

  @GetMapping(path = Array("/models/{model_name}/meta"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def getMeta(@PathVariable("model_name") modelName: String,
              @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int)
  : CompletionStage[Mleap.BundleMeta] =
    mleapExecutor
      .getBundleMeta(GetBundleMetaRequest(modelName))(timeout)
      .map(meta => BundleMeta.toJavaProto(meta))(executor).toJava

  @PostMapping(path = Array("/models/transform"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def transform(@RequestBody request: Mleap.TransformFrameRequest,
                @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int)
  : CompletionStage[Mleap.TransformFrameResponse] = {
    FrameReader(request.getFormat).fromBytes(request.getFrame.toByteArray) match {
      case Success(frame) =>
        mleapExecutor.transform(TransformFrameRequest(request.getModelName, frame, request.getOptions))(timeout)
          .mapAll {
            case Success(resp) => resp match {
              case Success(frame) => TransformFrameResponse(tag = request.getTag,
                frame = ByteString.copyFrom(FrameWriter(frame, request.getFormat).toBytes().get))
              case Failure(ex) => handleTransformFailure(request.getTag, ex)
            }
            case Failure(ex) => handleTransformFailure(request.getTag, ex)
          }(executor)
          .map(response => TransformFrameResponse.toJavaProto(response))(executor).toJava
      case Failure(ex) =>
        Future {
          TransformFrameResponse.toJavaProto(handleTransformFailure(request.getTag, ex))
        }(executor).toJava
    }
  }

  private def handleTransformFailure(tag: Long, ex: Throwable): TransformFrameResponse = {
    ProtobufScoringController.logger.error("Transform error due to ", ex)
    TransformFrameResponse(tag = tag, status = STATUS_ERROR,
      error = ExceptionUtils.getMessage(ex), backtrace = ExceptionUtils.getStackTrace(ex))
  }
}

object ProtobufScoringController {
  val logger = LoggerFactory.getLogger(classOf[ProtobufScoringController])
}
Example 8
Source File: LeapFrameScoringController.scala From mleap with Apache License 2.0
package ml.combust.mleap.springboot

import java.util.concurrent.CompletionStage

import akka.actor.ActorSystem
import ml.combust.mleap.executor.{MleapExecutor, TransformFrameRequest}
import ml.combust.mleap.pb.ErrorTransformResponse
import ml.combust.mleap.pb.TransformStatus.STATUS_ERROR
import ml.combust.mleap.runtime.serialization.{BuiltinFormats, FrameReader, FrameWriter}
import ml.combust.mleap.springboot.TypeConverters._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.json4s.jackson.JsonMethods
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.web.bind.annotation._

import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
import scala.util.{Failure, Success}
import scalapb.json4s.{Parser, Printer}

@RestController
@RequestMapping
class LeapFrameScoringController(@Autowired val actorSystem : ActorSystem,
                                 @Autowired val mleapExecutor: MleapExecutor,
                                 @Autowired val jsonPrinter: Printer,
                                 @Autowired val jsonParser: Parser) {

  private val executor = actorSystem.dispatcher

  @PostMapping(path = Array("/models/{model_name}/transform"),
    consumes = Array("application/json; charset=UTF-8"),
    produces = Array("application/json; charset=UTF-8"))
  def transformJson(@RequestBody body: Array[Byte],
                    @PathVariable("model_name") modelName: String,
                    @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int)
  : CompletionStage[_] = {
    FrameReader(BuiltinFormats.json).fromBytes(body) match {
      case Success(frame) =>
        mleapExecutor.transform(TransformFrameRequest(modelName, frame, None))(timeout)
          .mapAll {
            case Success(resp) => resp match {
              case Success(frame) => FrameWriter(frame, BuiltinFormats.json).toBytes().get
              case Failure(ex) => JsonMethods.compact(jsonPrinter.toJson(handleTransformFailure(ex)))
            }
            case Failure(ex) => JsonMethods.compact(jsonPrinter.toJson(handleTransformFailure(ex)))
          }(executor)
          .toJava
      case Failure(ex) =>
        Future {
          JsonMethods.compact(jsonPrinter.toJson(handleTransformFailure(ex)))
        }(executor).toJava
    }
  }

  @PostMapping(path = Array("/models/{model_name}/transform"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def transformProto(@RequestBody body: Array[Byte],
                     @PathVariable("model_name") modelName: String,
                     @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int)
  : CompletionStage[_] = {
    FrameReader(BuiltinFormats.binary).fromBytes(body) match {
      case Success(frame) =>
        mleapExecutor.transform(TransformFrameRequest(modelName, frame, None))(timeout)
          .mapAll {
            case Success(resp) => resp match {
              case Success(frame) => FrameWriter(frame, BuiltinFormats.binary).toBytes().get
              case Failure(ex) => ErrorTransformResponse.toJavaProto(handleTransformFailure(ex))
            }
            case Failure(ex) => ErrorTransformResponse.toJavaProto(handleTransformFailure(ex))
          }(executor).toJava
      case Failure(ex) =>
        Future {
          ErrorTransformResponse.toJavaProto(handleTransformFailure(ex))
        }(executor).toJava
    }
  }

  private def handleTransformFailure(ex: Throwable): ErrorTransformResponse = {
    LeapFrameScoringController.logger.error("Transform error due to ", ex)
    ErrorTransformResponse(status = STATUS_ERROR,
      error = ExceptionUtils.getMessage(ex), backtrace = ExceptionUtils.getStackTrace(ex))
  }
}

object LeapFrameScoringController {
  val logger = LoggerFactory.getLogger(classOf[LeapFrameScoringController])
}
Example 9
Source File: SparkSQLDriver.scala From sparkoscope with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import java.util.{ArrayList => JArrayList, Arrays, List => JList}

import scala.collection.JavaConverters._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, SQLContext}
import org.apache.spark.sql.execution.QueryExecution

private[hive] class SparkSQLDriver(val context: SQLContext = SparkSQLEnv.sqlContext)
  extends Driver
  with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.isEmpty) {
      new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, attr.dataType.catalogString, "")
      }
      new Schema(fieldSchemas.asJava, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      context.sparkContext.setJobDescription(command)
      val execution = context.sessionState.executePlan(context.sql(command).logicalPlan)
      hiveResponse = execution.hiveResultString()
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    } catch {
      case ae: AnalysisException =>
        logDebug(s"Failed in [$command]", ae)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
      case cause: Throwable =>
        logError(s"Failed in [$command]", cause)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
    }
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getResults(res: JList[_]): Boolean = {
    if (hiveResponse == null) {
      false
    } else {
      res.asInstanceOf[JArrayList[String]].addAll(hiveResponse.asJava)
      hiveResponse = null
      true
    }
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}
Example 10
Source File: SparkSQLDriver.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import java.util.{Arrays, ArrayList => JArrayList, List => JList}

import scala.collection.JavaConverters._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, SQLContext, SparkSession}
import org.apache.spark.sql.execution.QueryExecution

private[hive] class SparkSQLDriver(val sparkSession: SparkSession = SparkSQLEnv.sparkSession)
  extends Driver
  with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.isEmpty) {
      new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, attr.dataType.catalogString, "")
      }
      new Schema(fieldSchemas.asJava, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      sparkSession.sparkContext.setJobDescription(command)
      val execution = sparkSession.sessionState.executePlan(sparkSession.sql(command).logicalPlan)
      hiveResponse = execution.hiveResultString()
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    } catch {
      case ae: AnalysisException =>
        logDebug(s"Failed in [$command]", ae)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
      case cause: Throwable =>
        logError(s"Failed in [$command]", cause)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
    }
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getResults(res: JList[_]): Boolean = {
    if (hiveResponse == null) {
      false
    } else {
      res.asInstanceOf[JArrayList[String]].addAll(hiveResponse.asJava)
      hiveResponse = null
      true
    }
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}
Example 11
Source File: AbstractSparkSQLDriver.scala From iolap with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import scala.collection.JavaConversions._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}

private[hive] abstract class AbstractSparkSQLDriver(
    val context: HiveContext = SparkSQLEnv.hiveContext) extends Driver with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: context.QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.size == 0) {
      new Schema(new FieldSchema("Response code", "string", "") :: Nil, null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
      }
      new Schema(fieldSchemas, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      context.sparkContext.setJobDescription(command)
      val execution = context.executePlan(context.sql(command).logicalPlan)
      hiveResponse = execution.stringResult()
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    }
  }

  def runWrapper(command: String): CommandProcessorResponseWrapper = try {
    val result = run(command)
    new CommandProcessorResponseWrapper(result, null)
  } catch {
    case ae: AnalysisException =>
      logDebug(s"Failed in [$command]", ae)
      new CommandProcessorResponseWrapper(new CommandProcessorResponse(1,
        ExceptionUtils.getStackTrace(ae), null), ae)
    case cause: Throwable =>
      logError(s"Failed in [$command]", cause)
      new CommandProcessorResponseWrapper(new CommandProcessorResponse(1,
        ExceptionUtils.getStackTrace(cause), null), cause)
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}

private[hive] case class CommandProcessorResponseWrapper(
    rc : CommandProcessorResponse,
    cause : Throwable)
Example 12
Source File: SparkSQLDriver.scala From spark1.52 with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import java.util.{ArrayList => JArrayList, List => JList}

import scala.collection.JavaConversions._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}

private[hive] class SparkSQLDriver(
    val context: HiveContext = SparkSQLEnv.hiveContext) extends Driver with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: context.QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.size == 0) {
      new Schema(new FieldSchema("Response code", "string", "") :: Nil, null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
      }
      new Schema(fieldSchemas, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      context.sparkContext.setJobDescription(command)
      val execution = context.executePlan(context.sql(command).logicalPlan)
      hiveResponse = execution.stringResult()
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    } catch {
      case ae: AnalysisException =>
        logDebug(s"Failed in [$command]", ae)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
      case cause: Throwable =>
        logError(s"Failed in [$command]", cause)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
    }
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getResults(res: JList[_]): Boolean = {
    if (hiveResponse == null) {
      false
    } else {
      res.asInstanceOf[JArrayList[String]].addAll(hiveResponse)
      hiveResponse = null
      true
    }
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}
Example 13
Source File: SparkSQLDriver.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import java.util.{ArrayList => JArrayList, Arrays, List => JList}

import scala.collection.JavaConverters._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, SQLContext}
import org.apache.spark.sql.execution.{QueryExecution, SQLExecution}

private[hive] class SparkSQLDriver(val context: SQLContext = SparkSQLEnv.sqlContext)
  extends Driver
  with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.isEmpty) {
      new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, attr.dataType.catalogString, "")
      }
      new Schema(fieldSchemas.asJava, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      context.sparkContext.setJobDescription(command)
      val execution = context.sessionState.executePlan(context.sql(command).logicalPlan)
      hiveResponse = SQLExecution.withNewExecutionId(context.sparkSession, execution) {
        execution.hiveResultString()
      }
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    } catch {
      case ae: AnalysisException =>
        logDebug(s"Failed in [$command]", ae)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
      case cause: Throwable =>
        logError(s"Failed in [$command]", cause)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
    }
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getResults(res: JList[_]): Boolean = {
    if (hiveResponse == null) {
      false
    } else {
      res.asInstanceOf[JArrayList[String]].addAll(hiveResponse.asJava)
      hiveResponse = null
      true
    }
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}
Example 14
Source File: SparkSQLDriver.scala From BigDatalog with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver

import java.util.{Arrays, ArrayList => JArrayList, List => JList}

import org.apache.log4j.LogManager
import org.apache.spark.sql.AnalysisException

import scala.collection.JavaConverters._

import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse

import org.apache.spark.Logging
import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}

private[hive] class SparkSQLDriver(
    val context: HiveContext = SparkSQLEnv.hiveContext) extends Driver with Logging {

  private[hive] var tableSchema: Schema = _
  private[hive] var hiveResponse: Seq[String] = _

  override def init(): Unit = {
  }

  private def getResultSetSchema(query: context.QueryExecution): Schema = {
    val analyzed = query.analyzed
    logDebug(s"Result Schema: ${analyzed.output}")
    if (analyzed.output.isEmpty) {
      new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
    } else {
      val fieldSchemas = analyzed.output.map { attr =>
        new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
      }
      new Schema(fieldSchemas.asJava, null)
    }
  }

  override def run(command: String): CommandProcessorResponse = {
    // TODO unify the error code
    try {
      context.sparkContext.setJobDescription(command)
      val execution = context.executePlan(context.sql(command).logicalPlan)
      hiveResponse = execution.stringResult()
      tableSchema = getResultSetSchema(execution)
      new CommandProcessorResponse(0)
    } catch {
      case ae: AnalysisException =>
        logDebug(s"Failed in [$command]", ae)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(ae), null, ae)
      case cause: Throwable =>
        logError(s"Failed in [$command]", cause)
        new CommandProcessorResponse(1, ExceptionUtils.getStackTrace(cause), null, cause)
    }
  }

  override def close(): Int = {
    hiveResponse = null
    tableSchema = null
    0
  }

  override def getResults(res: JList[_]): Boolean = {
    if (hiveResponse == null) {
      false
    } else {
      res.asInstanceOf[JArrayList[String]].addAll(hiveResponse.asJava)
      hiveResponse = null
      true
    }
  }

  override def getSchema: Schema = tableSchema

  override def destroy() {
    super.destroy()
    hiveResponse = null
    tableSchema = null
  }
}