java.lang.reflect.InvocationTargetException Scala Examples
The following examples show how to use java.lang.reflect.InvocationTargetException in Scala. Each example notes its originating open-source project, source file, and license.
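All of the examples share the same basic pattern: a method or constructor is invoked through java.lang.reflect, the reflection layer wraps whatever that call throws in an InvocationTargetException, and the caller unwraps the original failure with getTargetException (or getCause) before handling or rethrowing it. The short sketch below is not taken from any of the projects listed here; Greeter and UnwrapExample are hypothetical names used only to illustrate that pattern.

import java.lang.reflect.InvocationTargetException

object Greeter {
  def greet(name: String): String =
    if (name.isEmpty) throw new IllegalArgumentException("name must not be empty")
    else s"Hello, $name"
}

object UnwrapExample extends App {
  // Look up the method reflectively on the singleton's class.
  val method = Greeter.getClass.getMethod("greet", classOf[String])
  try {
    // Any exception thrown inside greet arrives wrapped by the reflection layer.
    method.invoke(Greeter, "")
  } catch {
    case e: InvocationTargetException =>
      // The wrapped exception is the one greet actually threw.
      val cause = e.getTargetException
      println(s"greet failed with ${cause.getClass.getName}: ${cause.getMessage}")
  }
}

Most of the examples below apply the same unwrapping step, or pass the wrapped exception along as a cause, before deciding whether to translate, log, or rethrow the underlying error.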
Example 1
Source File: UserDefinedFunctionBase.scala From incubator-daffodil with Apache License 2.0
package org.apache.daffodil.dpath

import org.apache.daffodil.udf.UserDefinedFunction
import org.apache.daffodil.udf.UserDefinedFunctionProcessingErrorException
import org.apache.daffodil.udf.exceptions.UserDefinedFunctionProcessingError
import org.apache.daffodil.udf.UserDefinedFunctionFatalErrorException
import org.apache.daffodil.udf.UserDefinedFunctionService.UserDefinedFunctionMethod
import java.lang.reflect.InvocationTargetException
import org.apache.daffodil.util.Maybe
import org.apache.daffodil.infoset.DataValue.DataValuePrimitive
import org.apache.daffodil.infoset.DataValue

case class UserDefinedFunctionCall(
  functionQNameString: String,
  recipes: List[CompiledDPath],
  userDefinedFunction: UserDefinedFunction,
  evaluateFxn: UserDefinedFunctionMethod)
  extends FNArgsList(recipes) {

  override def computeValue(values: List[DataValuePrimitive], dstate: DState) = {
    val jValues = values.map { _.getAnyRef.asInstanceOf[Object] }
    try {
      val res = evaluateFxn.method.invoke(userDefinedFunction, jValues: _*)
      DataValue.unsafeFromAnyRef(res)
    } catch {
      case e: InvocationTargetException => {
        // wraps any error thrown by invoked method (i.e UDF.evaluate)
        val targetException = e.getTargetException
        targetException match {
          case te: UserDefinedFunctionProcessingError =>
            throw new UserDefinedFunctionProcessingErrorException(
              s"User Defined Function '$functionQNameString'",
              Maybe(dstate.compileInfo.schemaFileLocation),
              dstate.contextLocation,
              Maybe(te),
              Maybe.Nope)
          case te: Exception =>
            throw new UserDefinedFunctionFatalErrorException(
              s"User Defined Function '$functionQNameString' Error",
              te,
              userDefinedFunction.getClass.getName)
        }
      }
      case e @ (_: IllegalArgumentException | _: NullPointerException | _: ReflectiveOperationException) =>
        throw new UserDefinedFunctionProcessingErrorException(
          s"User Defined Function '$functionQNameString'",
          Maybe(dstate.compileInfo.schemaFileLocation),
          dstate.contextLocation,
          Maybe(e),
          Maybe.Nope)
      case e: ExceptionInInitializerError =>
        throw new UserDefinedFunctionFatalErrorException(
          s"User Defined Function '$functionQNameString' Error",
          e,
          userDefinedFunction.getClass.getName)
    }
  }
}
Example 2
Source File: DistributionLikeFactory.scala From warp-core with MIT License
package com.workday.telemetron.math

import java.lang.reflect.InvocationTargetException

  def getDistribution[T <: DistributionLike](distributionClass: Class[T], parameters: Array[Double]): T = {
    try {
      distributionClass.getDeclaredConstructor(parameters.getClass).newInstance(parameters)
    } catch {
      case exception @ (_: InstantiationException | _: IllegalAccessException |
                        _: InvocationTargetException | _: NoSuchMethodException) =>
        throw new RuntimeException(
          s"unable to create ${distributionClass.getCanonicalName} with parameters ${parameters mkString ", "}",
          exception)
    }
  }
}
Example 3
Source File: invoke_scala.scala From libisabelle with Apache License 2.0
package isabelle

import java.lang.reflect.{Method, Modifier, InvocationTargetException}

import scala.util.matching.Regex

object Invoke_Scala {

class Invoke_Scala extends Session.Protocol_Handler {
  private var session: Session = null
  private var futures = Map.empty[String, Future[Unit]]

  override def init(init_session: Session): Unit = synchronized {
    session = init_session
  }

  override def exit(): Unit = synchronized {
    for ((id, future) <- futures) cancel(id, future)
    futures = Map.empty
  }

  private def fulfill(id: String, tag: Invoke_Scala.Tag.Value, res: String): Unit = synchronized {
    if (futures.isDefinedAt(id)) {
      session.protocol_command("Invoke_Scala.fulfill", id, tag.id.toString, res)
      futures -= id
    }
  }

  private def cancel(id: String, future: Future[Unit]) {
    future.cancel
    fulfill(id, Invoke_Scala.Tag.INTERRUPT, "")
  }

  private def invoke_scala(msg: Prover.Protocol_Output): Boolean = synchronized {
    msg.properties match {
      case Markup.Invoke_Scala(name, id) =>
        futures += (id -> Future.fork {
          val (tag, result) = Invoke_Scala.method(name, msg.text)
          fulfill(id, tag, result)
        })
        true
      case _ => false
    }
  }

  private def cancel_scala(msg: Prover.Protocol_Output): Boolean = synchronized {
    msg.properties match {
      case Markup.Cancel_Scala(id) =>
        futures.get(id) match {
          case Some(future) => cancel(id, future)
          case None =>
        }
        true
      case _ => false
    }
  }

  val functions = List(
    Markup.INVOKE_SCALA -> invoke_scala _,
    Markup.CANCEL_SCALA -> cancel_scala _)
}
Example 4
Source File: JsonConversions.scala From coral with Apache License 2.0
package io.coral.api

import java.lang.reflect.InvocationTargetException
import org.json4s.{MappingException, DefaultFormats, Formats}
import spray.http.{HttpCharsets, HttpEntity, MediaTypes}
import spray.httpx.Json4sJacksonSupport
import spray.httpx.unmarshalling.Unmarshaller

object JsonConversions extends Json4sJacksonSupport {
  implicit def json4sJacksonFormats: Formats = DefaultFormats

  implicit def jsonApiUnmarshaller[T: Manifest] =
    Unmarshaller[T](MediaTypes.`application/json`) {
      case x: HttpEntity.NonEmpty ⇒
        try serialization.read[T](x.asString(defaultCharset = HttpCharsets.`UTF-8`))
        catch {
          case MappingException("unknown error", ite: InvocationTargetException) ⇒
            throw ite.getCause
        }
    }
}
Example 5
Source File: StreamToPublisherSpec.scala From interop-reactive-streams with Apache License 2.0
package zio.interop.reactivestreams

import java.lang.reflect.InvocationTargetException

import org.reactivestreams.Publisher
import org.reactivestreams.tck.{ PublisherVerification, TestEnvironment }
import org.testng.annotations.Test
import zio.Task
import zio.UIO
import zio.ZIO
import zio.blocking._
import zio.stream.Stream
import zio.test._
import zio.test.Assertion._

object StreamToPublisherSpec extends DefaultRunnableSpec {
  override def spec =
    suite("Converting a `Stream` to a `Publisher`")(
      suite("passes all required and optional TCK tests")(tests: _*)
    )

  def makePV(runtime: zio.Runtime[Any]) =
    new PublisherVerification[Int](new TestEnvironment(2000, 500), 2000L) {
      def createPublisher(elements: Long): Publisher[Int] =
        runtime.unsafeRun(
          Stream
            .unfold(elements)(n => if (n > 0) Some((1, n - 1)) else None)
            .toPublisher
        )

      override def createFailedPublisher(): Publisher[Int] =
        runtime.unsafeRun(
          Stream
            .fail(new RuntimeException("boom!"))
            .map(_.asInstanceOf[Int])
            .toPublisher
        )
    }

  val tests = classOf[PublisherVerification[Int]]
    .getMethods()
    .toList
    .filter { method =>
      method
        .getAnnotations()
        .exists(annotation => classOf[Test].isAssignableFrom(annotation.annotationType()))
    }
    .collect {
      case method if method.getName().startsWith("untested") =>
        test(method.getName())(assert(())(anything)) @@ TestAspect.ignore
      case method =>
        testM(method.getName())(
          for {
            runtime <- ZIO.runtime[Any]
            pv       = makePV(runtime)
            _       <- UIO(pv.setUp())
            r       <- blocking(Task(method.invoke(pv))).unit.mapError {
                         case e: InvocationTargetException => e.getTargetException()
                       }.run
          } yield assert(r)(succeeds(isUnit))
        )
    }
}
Example 6
Source File: Json4sSupport.scala From service-container with Apache License 2.0
package com.github.vonnagy.service.container.http.json

import java.lang.reflect.InvocationTargetException

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.util.ByteString
import org.json4s.JsonAST.JValue
import org.json4s.{Formats, MappingException, Serialization}

  implicit def json4sMarshaller[A <: AnyRef](
    implicit serialization: Serialization,
    formats: Formats,
    shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False
  ): ToEntityMarshaller[A] = {
    shouldWritePretty match {
      case ShouldWritePretty.False => jsonStringMarshaller.compose(serialization.write[A])
      case ShouldWritePretty.True  => jsonStringMarshaller.compose(serialization.writePretty[A])
    }
  }

  implicit def json4sJValueMarshaller[A <: JValue](
    implicit serialization: Serialization,
    formats: Formats,
    shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False
  ): ToEntityMarshaller[A] = {
    shouldWritePretty match {
      case ShouldWritePretty.False => jsonStringMarshaller.compose(serialization.write[A])
      case ShouldWritePretty.True  => jsonStringMarshaller.compose(serialization.writePretty[A])
    }
  }
}
Example 7
Source File: CryptoAlgebraSpec.scala From vinyldns with Apache License 2.0
package vinyldns.core.crypto

import java.lang.reflect.InvocationTargetException

import com.typesafe.config.{Config, ConfigException, ConfigFactory}
import scala.collection.JavaConverters._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class TestCrypto(config: Config) extends CryptoAlgebra {
  val testMe: String = config.getString("test-me")
  def encrypt(value: String): String = value
  def decrypt(value: String): String = value
}

class CryptoAlgebraSpec extends AnyWordSpec with Matchers {

  private val conf =
    """
      | type = "vinyldns.core.crypto.NoOpCrypto"
      | test-me = "hello"
    """.stripMargin

  private val cryptoConf = ConfigFactory.parseString(conf)

  "CryptoAlgebra" should {
    "load the expected crypto instance" in {
      CryptoAlgebra.load(cryptoConf).unsafeRunSync() shouldBe a[NoOpCrypto]
    }

    "throw an exception if config is missing type" in {
      val badConfig = ConfigFactory.empty()
      a[ConfigException] should be thrownBy CryptoAlgebra.load(badConfig).unsafeRunSync()
    }

    "return ok if all params are provided" in {
      val opts = Map("type" -> "vinyldns.core.crypto.TestCrypto", "test-me" -> "wassup")
      val goodConfig = ConfigFactory.parseMap(opts.asJava)
      val ok = CryptoAlgebra.load(goodConfig).unsafeRunSync().asInstanceOf[TestCrypto]
      ok.testMe shouldBe "wassup"
    }

    "throw an exception if config is missing items required by the class" in {
      val opts = Map("type" -> "vinyldns.core.crypto.TestCrypto")
      val badConfig = ConfigFactory.parseMap(opts.asJava)
      val thrown = the[InvocationTargetException] thrownBy CryptoAlgebra
        .load(badConfig)
        .unsafeRunSync()
      thrown.getCause shouldBe a[ConfigException]
    }
  }
}
Example 8
Source File: ExecutorProxy.scala From ncdbg with BSD 3-Clause "New" or "Revised" License
package com.programmaticallyspeaking.ncd.infra

import java.lang.reflect.{InvocationHandler, InvocationTargetException, Method}
import java.util.concurrent.Executor
import java.util.concurrent.atomic.AtomicInteger

import org.slf4s.Logging

import scala.concurrent.duration._
import scala.concurrent.{Await, Future, Promise, TimeoutException}
import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}

class ExecutorProxy(executor: Executor) {
  import scala.collection.JavaConverters._

  def createFor[A <: AnyRef : ClassTag](instance: A): A = {
    val clazz = implicitly[ClassTag[A]].runtimeClass
    java.lang.reflect.Proxy.newProxyInstance(clazz.getClassLoader, Array(clazz), new Handler(instance)).asInstanceOf[A]
  }

  class Handler(instance: AnyRef) extends InvocationHandler with Logging {
    import scala.concurrent.ExecutionContext.Implicits._

    private val className = instance.getClass.getName
    private val idGen = new AtomicInteger(0)
    private var awaitingCalls = Map[Int, String]()

    override def invoke(proxy: scala.Any, method: Method, args: Array[AnyRef]): AnyRef = {
      val resultPromise = Promise[AnyRef]()

      val before = System.nanoTime()
      val id = idGen.getAndIncrement()
      val argss = Option(args).getOrElse(Array.empty)
      val desc = s"$method(${argss.mkString(", ")})[$id]"
      log.trace(s"Waiting to execute: $desc")

      // Snapshot of waiting calls prior to submitting to the executor
      val waitingCallsAtEntry = awaitingCalls

      executor.execute(() => {
        log.trace(s"Execute: $id")
        Try(method.invoke(instance, args: _*)) match {
          case Success(f: Future[_]) => resultPromise.completeWith(f.asInstanceOf[Future[AnyRef]])
          case Success(result) => resultPromise.success(result)
          case Failure(t: InvocationTargetException) => resultPromise.failure(t.getCause)
          case Failure(t) => resultPromise.failure(t)
        }
      })

      resultPromise.future.onComplete { _ =>
        val methodName = method.getName
        val millis = (System.nanoTime() - before).nanos.toMillis
        log.trace(s"Elapsed time for $className.$methodName = $millis ms")
      }

      if (classOf[Future[_]].isAssignableFrom(method.getReturnType)) resultPromise.future
      else {
        // Update with this call
        awaitingCalls += (id -> desc)
        //TODO: Configurable timeout
        try Await.result(resultPromise.future, 30.seconds) catch {
          case _: TimeoutException =>
            val other = waitingCallsAtEntry.values
            val sb = new StringBuilder(s"Timed out waiting for '$desc' to complete. Calls at entry: ${other.mkString("'", "', '", "'")}. Stack:\n")
            appendStackTraces(sb)
            log.debug(sb.toString())
            throw new TimeoutException(s"Timed out waiting for '$desc' to complete.")
        } finally {
          // Done with this call
          awaitingCalls -= id
          log.trace(s"Done: $id")
        }
      }
    }

    private def appendStackTraces(sb: StringBuilder): Unit = {
      Thread.getAllStackTraces.asScala.foreach { tup =>
        sb.append("\n> THREAD ").append(tup._1.getName).append("\n")
        tup._2.foreach(ste => sb.append("  ").append(ste).append("\n"))
      }
    }
  }
}
Example 9
Source File: SparkSqlExecutor.scala From Linkis with Apache License 2.0
package com.webank.wedatasphere.linkis.engine.executors

import java.lang.reflect.InvocationTargetException
import java.util.concurrent.atomic.AtomicLong

import com.webank.wedatasphere.linkis.common.conf.CommonVars
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engine.configuration.SparkConfiguration
import com.webank.wedatasphere.linkis.engine.execute.EngineExecutorContext
import com.webank.wedatasphere.linkis.engine.extension.SparkSqlExtension
import com.webank.wedatasphere.linkis.engine.spark.common.{Kind, SparkSQL}
import com.webank.wedatasphere.linkis.engine.spark.utils.EngineUtils
import com.webank.wedatasphere.linkis.scheduler.executer.{ErrorExecuteResponse, ExecuteResponse, SuccessExecuteResponse}
import org.apache.commons.lang.exception.ExceptionUtils
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, SQLContext}

      SQLSession.showDF(sc, jobGroup, rdd, null, SparkConfiguration.SHOW_DF_MAX_RES.getValue, engineExecutorContext)
      SuccessExecuteResponse()
    } catch {
      case e: InvocationTargetException =>
        var cause = ExceptionUtils.getCause(e)
        if (cause == null) cause = e
        error("execute sparkSQL failed!", cause)
        ErrorExecuteResponse(ExceptionUtils.getRootCauseMessage(e), cause)
      case ite: Exception =>
        error("execute sparkSQL failed!", ite)
        ErrorExecuteResponse(ExceptionUtils.getRootCauseMessage(ite), ite)
    } finally sc.clearJobGroup()
  }

  override def kind: Kind = SparkSQL()

  override def open: Unit = {}

  override def close: Unit = {}
}
Example 10
Source File: RecordConverter.scala From cuesheet with Apache License 2.0
package com.kakao.cuesheet.convert

import java.lang.reflect
import java.lang.reflect.InvocationTargetException

import com.kakao.mango.logging.Logging

import scala.reflect.runtime.universe._

object RecordConverter extends Logging {
  val m = runtimeMirror(getClass.getClassLoader)

  def isTuple(clazz: Class[_]) = clazz.getName.startsWith("scala.Tuple")
  def isCaseClass(clazz: Class[_]) = clazz.getInterfaces.contains(classOf[scala.Product])

  def create[T](convert: Seq[_] => T) = new RecordConverter[T] {
    override def apply(seq: Seq[_]): T = convert(seq)
  }

  def apply[T: TypeTag]: RecordConverter[T] = {
    val clazz = runtimeMirror(Thread.currentThread().getContextClassLoader).runtimeClass(typeOf[T])

    if (clazz.isArray) {
      val componentType = clazz.getComponentType
      val convert = Converters.converterTo(componentType)

      create { seq =>
        val array = reflect.Array.newInstance(clazz.getComponentType, seq.size)
        for ((element, index) <- seq.zipWithIndex) {
          reflect.Array.set(array, index, convert(element.asInstanceOf[AnyRef]))
        }
        array.asInstanceOf[T]
      }
    } else if (clazz == classOf[Seq[_]]) {
      create(_.asInstanceOf[T])
    } else {
      val types: Seq[Class[_]] = if (isTuple(clazz)) {
        typeTag[T].tpe.asInstanceOf[TypeRefApi].args.map(m.runtimeClass)
      } else {
        clazz.getConstructors()(0).getParameterTypes
      }

      val converters: Seq[AnyRef => AnyRef] = types.map(Converters.converterTo)

      create(seq => {
        val params = seq.take(converters.size).zipWithIndex.map {
          case (obj: AnyRef, idx) => converters(idx)(obj)
          case _ => null // if not matched to AnyRef, it's null
        }
        try {
          clazz.getConstructors()(0).newInstance(params: _*).asInstanceOf[T]
        } catch {
          case e: InvocationTargetException =>
            logger.error(s"Error while constructing class $clazz", e.getTargetException)
            throw e
          case e: IllegalArgumentException =>
            logger.error(s"Illegal argument provided to constructor of $clazz", e)
            throw e
          case e: Throwable =>
            logger.error(s"Unknown exception while constructing class $clazz", e)
            throw e
        }
      })
    }
  }
}
Example 11
Source File: AuthenticationProvider.scala From incubator-livy with Apache License 2.0
package org.apache.livy.thriftserver.auth

import java.lang.reflect.InvocationTargetException
import javax.security.sasl.AuthenticationException

import org.apache.hive.service.auth.PasswdAuthenticationProvider

import org.apache.livy.LivyConf

object AuthenticationProvider {
  // TODO: support PAM
  val AUTH_METHODS = Seq("NONE", "CUSTOM", "LDAP")

  @throws[AuthenticationException]
  def getAuthenticationProvider(method: String, conf: LivyConf): PasswdAuthenticationProvider = {
    method match {
      case "NONE" => new NoneAuthenticationProvider
      case "CUSTOM" => new CustomAuthenticationProvider(conf)
      case "LDAP" => new LdapAuthenticationProviderImpl(conf)
      case _ => throw new AuthenticationException("Unsupported authentication method")
    }
  }
}

class CustomAuthenticationProvider(conf: LivyConf) extends PasswdAuthenticationProvider {
  private val customClass: Class[_ <: PasswdAuthenticationProvider] = {
    Class.forName(conf.get(LivyConf.THRIFT_CUSTOM_AUTHENTICATION_CLASS))
      .asSubclass(classOf[PasswdAuthenticationProvider])
  }

  val provider: PasswdAuthenticationProvider = {
    // Try first a constructor with the LivyConf as parameter, then a constructor with no parameter
    // of none of them is available this fails with an exception.
    try {
      customClass.getConstructor(classOf[LivyConf]).newInstance(conf)
    } catch {
      case _: NoSuchMethodException | _: InstantiationException | _: IllegalAccessException |
           _: InvocationTargetException =>
        customClass.getConstructor().newInstance()
    }
  }

  override def Authenticate(user: String, password: String): Unit = {
    provider.Authenticate(user, password)
  }
}
Example 12
Source File: SQLInterpreter.scala From incubator-livy with Apache License 2.0
package org.apache.livy.repl

import java.lang.reflect.InvocationTargetException
import java.sql.Date

import scala.util.control.NonFatal

import org.apache.spark.SparkConf
import org.apache.spark.sql.Row
import org.apache.spark.sql.SparkSession
import org.json4s._
import org.json4s.JsonAST.{JNull, JString}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.livy.Logging
import org.apache.livy.rsc.RSCConf
import org.apache.livy.rsc.driver.SparkEntries

class SQLInterpreter(
    sparkConf: SparkConf,
    rscConf: RSCConf,
    sparkEntries: SparkEntries) extends Interpreter with Logging {

  case object DateSerializer extends CustomSerializer[Date](_ => (
    {
      case JString(s) => Date.valueOf(s)
      case JNull => null
    },
    {
      case d: Date => JString(d.toString)
    }))

  private implicit def formats: Formats = DefaultFormats + DateSerializer

  private var spark: SparkSession = null

  private val maxResult = rscConf.getInt(RSCConf.Entry.SQL_NUM_ROWS)

  override def kind: String = "sql"

  override def start(): Unit = {
    require(!sparkEntries.sc().sc.isStopped)
    spark = sparkEntries.sparkSession()
  }

  override protected[repl] def execute(code: String): Interpreter.ExecuteResponse = {
    try {
      val result = spark.sql(code)
      val schema = parse(result.schema.json)

      // Get the row data
      val rows = result.take(maxResult)
        .map {
          _.toSeq.map {
            // Convert java BigDecimal type to Scala BigDecimal, because current version of
            // Json4s doesn't support java BigDecimal as a primitive type (LIVY-455).
            case i: java.math.BigDecimal => BigDecimal(i)
            case e => e
          }
        }

      val jRows = Extraction.decompose(rows)

      Interpreter.ExecuteSuccess(
        APPLICATION_JSON -> (("schema" -> schema) ~ ("data" -> jRows)))
    } catch {
      case e: InvocationTargetException =>
        warn(s"Fail to execute query $code", e.getTargetException)
        val cause = e.getTargetException
        Interpreter.ExecuteError("Error", cause.getMessage, cause.getStackTrace.map(_.toString))
      case NonFatal(f) =>
        warn(s"Fail to execute query $code", f)
        Interpreter.ExecuteError("Error", f.getMessage, f.getStackTrace.map(_.toString))
    }
  }

  override def close(): Unit = { }
}
Example 13
Source File: ReflectiveDynamicAccess.scala From perf_tester with Apache License 2.0
package akka.actor

import scala.collection.immutable
import java.lang.reflect.InvocationTargetException
import scala.reflect.ClassTag
import scala.util.Try

class ReflectiveDynamicAccess(val classLoader: ClassLoader) extends DynamicAccess {

  override def getClassFor[T: ClassTag](fqcn: String): Try[Class[_ <: T]] =
    Try[Class[_ <: T]]({
      val c = Class.forName(fqcn, false, classLoader).asInstanceOf[Class[_ <: T]]
      val t = implicitly[ClassTag[T]].runtimeClass
      if (t.isAssignableFrom(c)) c else throw new ClassCastException(t + " is not assignable from " + c)
    })

  override def createInstanceFor[T: ClassTag](clazz: Class[_], args: immutable.Seq[(Class[_], AnyRef)]): Try[T] =
    Try {
      val types = args.map(_._1).toArray
      val values = args.map(_._2).toArray
      val constructor = clazz.getDeclaredConstructor(types: _*)
      constructor.setAccessible(true)
      val obj = constructor.newInstance(values: _*)
      val t = implicitly[ClassTag[T]].runtimeClass
      if (t.isInstance(obj)) obj.asInstanceOf[T]
      else throw new ClassCastException(clazz.getName + " is not a subtype of " + t)
    } recover { case i: InvocationTargetException if i.getTargetException ne null ⇒ throw i.getTargetException }

  override def createInstanceFor[T: ClassTag](fqcn: String, args: immutable.Seq[(Class[_], AnyRef)]): Try[T] =
    getClassFor(fqcn) flatMap { c ⇒ createInstanceFor(c, args) }

  override def getObjectFor[T: ClassTag](fqcn: String): Try[T] = {
    val classTry =
      if (fqcn.endsWith("$")) getClassFor(fqcn)
      else getClassFor(fqcn + "$") recoverWith { case _ ⇒ getClassFor(fqcn) }
    classTry flatMap { c ⇒
      Try {
        val module = c.getDeclaredField("MODULE$")
        module.setAccessible(true)
        val t = implicitly[ClassTag[T]].runtimeClass
        module.get(null) match {
          case null                  ⇒ throw new NullPointerException
          case x if !t.isInstance(x) ⇒ throw new ClassCastException(fqcn + " is not a subtype of " + t)
          case x: T                  ⇒ x
        }
      } recover { case i: InvocationTargetException if i.getTargetException ne null ⇒ throw i.getTargetException }
    }
  }
}
Example 14
Source File: MetricLoggerTest.scala From airframe with Apache License 2.0
package wvlet.airframe.fluentd

import java.lang.reflect.InvocationTargetException

import wvlet.airframe.{Design, fluentd}
import wvlet.airspec.AirSpec

case class SampleMetric(time: Long, value: String) extends TaggedMetric {
  def metricTag = "sample"
}

case class NestedMetric(message: String, data: Seq[Int], opt: Option[String], sample: SampleMetric)
    extends TaggedMetric {
  def metricTag = "nested"
}

case class ErrorMetric(errorType: String, ex: Exception) extends TaggedMetric {
  def metricTag = "error_log"
}

class MetricLoggerTest extends AirSpec {
  override protected def design: Design = {
    fluentd.withDebugConsoleLogging
  }

  def `generate MetricLogger for case classes`(f: MetricLoggerFactory): Unit = {
    val l = f.getTypedLogger[SampleMetric]
    l.emit(SampleMetric(100000, "hello"))
    l.emit(SampleMetric(100001, "fluentd"))

    val ll = f.getTypedLoggerWithTagPrefix[NestedMetric]("extended")
    ll.emit(NestedMetric("test nested logs", Seq(1, 2, 3), None, SampleMetric(100002, "I'm happy")))
  }

  def `support nested metrics`(f: MetricLoggerFactory): Unit = {
    val l = f.getTypedLogger[NestedMetric]
    l.emit(NestedMetric("test nested logs", Seq(1, 2, 3), None, SampleMetric(100002, "I'm happy")))
    l.emit(
      NestedMetric(
        "test options",
        Seq(10, 20),
        Some("optional value"),
        SampleMetric(100003, "option value is also supported")
      )
    )
  }

  def `support exception stack trace metrics`(f: MetricLoggerFactory): Unit = {
    val l = f.getTypedLogger[ErrorMetric]
    l.emit(ErrorMetric("illegal_argument", new IllegalArgumentException("invalid input")))
    l.emit(ErrorMetric("remote error", new InvocationTargetException(new IllegalStateException("unknown error"))))
  }
}
Example 15
Source File: AnnotatedRugFunction.scala From rug with GNU General Public License v3.0
package com.atomist.rug.spi

import java.lang.reflect.{InvocationTargetException, Method}

import com.atomist.param.{Parameter, ParameterValues, Tag}
import org.springframework.core.annotation.AnnotationUtils

  private def convert(param: java.lang.reflect.Parameter, avalue: Any): AnyRef = {
    avalue match {
      case o: String =>
        (param.getType, o) match {
          case (p, "") if p == classOf[Int] => new Integer(0)
          case (p, _) if p == classOf[Int] => o.toInt.asInstanceOf[AnyRef]
          case (p, "") if p == classOf[Integer] => new Integer(0)
          case (p, _) if p == classOf[Integer] => Integer.parseInt(o).asInstanceOf[AnyRef]
          case (p, "") if p == classOf[Boolean] => false.asInstanceOf[AnyRef]
          case (p, _) if p == classOf[Boolean] => o.toBoolean.asInstanceOf[AnyRef]
          case (p, "") if p == classOf[Double] => 0d.asInstanceOf[AnyRef]
          case (p, _) if p == classOf[Double] => o.toDouble.asInstanceOf[AnyRef]
          case (p, "") if p == classOf[java.lang.Double] => 0d.asInstanceOf[AnyRef]
          case (p, _) if p == classOf[java.lang.Double] => java.lang.Double.parseDouble(o).asInstanceOf[AnyRef]
          case (p, "") if p == classOf[Float] => 0f.asInstanceOf[AnyRef]
          case (p, _) if p == classOf[Float] => o.toFloat.asInstanceOf[AnyRef]
          case (p, "") if p == classOf[java.lang.Float] => 0f.asInstanceOf[AnyRef]
          case (p, _) if p == classOf[java.lang.Float] => java.lang.Float.parseFloat(o).asInstanceOf[AnyRef]
          case (p, _) if p == classOf[String] => o
        }
      case _ => avalue.asInstanceOf[AnyRef]
    }
  }
}
Example 16
Source File: TypeOperation.scala From rug with GNU General Public License v3.0
package com.atomist.rug.spi

import java.lang.reflect.InvocationTargetException

import com.atomist.rug.RugRuntimeException
import com.atomist.rug.runtime.js.interop.NashornUtils
import com.atomist.tree.TreeNode
import com.atomist.tree.content.text.OutOfDateNodeException

  def invoke(target: Object, rawArgs: Seq[AnyRef]): Object = {
    val args = rawArgs.map(a => NashornUtils.toJavaType(a))
    // Include TreeNode methods, although the annotations won't be inherited
    val methods = target.getClass.getMethods.toSeq.filter(m =>
      this.name.equals(m.getName) &&
        (m.getDeclaredAnnotations.exists(_.isInstanceOf[ExportFunction]) || TreeNodeOperations.contains(m.getName)) &&
        this.parameters.size == m.getParameterCount
    )
    if (methods.size != 1)
      throw new IllegalArgumentException(
        s"Operation [$name] cannot be invoked on [${target.getClass.getName}]: Found ${methods.size} definitions with ${parameters.size}, required exactly 1: " +
          s"Known methods=[${methods.mkString(",")}]"
      )
    try {
      methods.head.invoke(target, args: _*)
    } catch {
      case e: InvocationTargetException if e.getCause.isInstanceOf[OutOfDateNodeException] =>
        throw e.getCause // we meant to do this
      case t: Throwable =>
        val argDiagnostics = args map {
          case null => "null"
          case o => s"$o: ${o.getClass}"
        }
        throw new RugRuntimeException(null, s"Exception invoking ${methods.head} with args=${argDiagnostics.mkString(",")}: ${t.getMessage}", t)
    }
  }
}

object TypeOperation {

  val TreeNodeAllTypeOperations: Seq[TypeOperation] =
    new ReflectiveTypeOperationFinder(classOf[TreeNode]).allOperations

  val TreeNodeTypeOperations: Seq[TypeOperation] =
    new ReflectiveTypeOperationFinder(classOf[TreeNode]).operations

  val TreeNodeType = new Typed {
    override val name = "TreeNode"
    override def description: String = "TreeNode operations"
    override def allOperations = TreeNodeAllTypeOperations
    override def operations = TreeNodeTypeOperations
  }

  val TreeNodeOperations: Set[String] =
    TreeNodeAllTypeOperations.map(_.name).toSet
}
Example 17
Source File: package.scala From sbt-reactive-app with Apache License 2.0
package com.lightbend.rp.sbtreactiveapp

import java.lang.reflect.InvocationTargetException

import scala.reflect.ClassTag
import scala.util.Try

package object magic {
  def getSingletonObject[T: ClassTag](classLoader: ClassLoader, className: String): Try[T] =
    Try {
      val clazz = classLoader.loadClass(className)
      val t = implicitly[ClassTag[T]].runtimeClass
      clazz.getField("MODULE$").get(null) match {
        case null => throw new ClassNotFoundException(s"Unable to find $className using classloader: $classLoader")
        case c if !t.isInstance(c) => throw new ClassCastException(s"${clazz.getName} is not a subtype of $t")
        case c: T => c
      }
    }.recover {
      case i: InvocationTargetException if i.getTargetException != null => throw i.getTargetException
    }

  def objectExists(classLoader: ClassLoader, className: String): Boolean =
    try {
      classLoader.loadClass(className).getField("MODULE$").get(null) != null
    } catch {
      case _: Exception => false
    }

  def withContextClassloader[T](loader: ClassLoader)(body: ClassLoader => T): T = {
    val current = Thread.currentThread().getContextClassLoader
    try {
      Thread.currentThread().setContextClassLoader(loader)
      body(loader)
    } finally Thread.currentThread().setContextClassLoader(current)
  }
}