scala.tools.reflect.ToolBox Scala Examples
The following examples show how to use scala.tools.reflect.ToolBox.
Each example notes the source file, project, and license it was taken from.
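Before the examples, a minimal sketch of the basic ToolBox workflow may help: create a toolbox from a runtime mirror, parse source into a tree, then eval or compile it. The snippet is illustrative and not taken from any of the projects below.

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox

object ToolBoxBasics extends App {
  // A ToolBox is built from a runtime mirror; it can parse, type-check,
  // compile and evaluate Scala code at runtime.
  val tb = currentMirror.mkToolBox()

  // parse: String => Tree (no type checking yet)
  val tree = tb.parse("List(1, 2, 3).map(_ * 2).sum")

  // eval: type-checks, compiles and runs the tree on every call
  println(tb.eval(tree)) // 12

  // compile: compiles once and returns a () => Any thunk that can be cached
  val thunk = tb.compile(tree)
  println(thunk()) // 12 again, without recompiling

}

eval recompiles its argument on every call, while compile returns a reusable thunk; several of the examples below (the refined benchmarks, s2graph's directive evaluator, Raphtory's analyser loader) lean on that distinction.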
Example 1
Source File: Annotation.scala From morpheus with Apache License 2.0 | 6 votes |
package org.opencypher.morpheus.impl.util

import org.opencypher.morpheus.api.io.{Labels, Node, Relationship, RelationshipType}

import scala.annotation.StaticAnnotation
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

private[morpheus] object Annotation {

  def labels[E <: Node: TypeTag]: Set[String] = synchronized {
    get[Labels, E] match {
      case Some(ls) => ls.labels.toSet
      case None     => Set(runtimeClass[E].getSimpleName)
    }
  }

  def relType[E <: Relationship: TypeTag]: String = synchronized {
    get[RelationshipType, E] match {
      case Some(RelationshipType(tpe)) => tpe
      case None                        => runtimeClass[E].getSimpleName.toUpperCase
    }
  }

  def get[A <: StaticAnnotation: TypeTag, E: TypeTag]: Option[A] = synchronized {
    val maybeAnnotation = staticClass[E].annotations.find(_.tree.tpe =:= typeOf[A])
    maybeAnnotation.map { annotation =>
      val tb = typeTag[E].mirror.mkToolBox()
      val instance = tb.eval(tb.untypecheck(annotation.tree)).asInstanceOf[A]
      instance
    }
  }

  private def runtimeClass[E: TypeTag]: Class[E] = synchronized {
    val tag = typeTag[E]
    val mirror = tag.mirror
    val runtimeClass = mirror.runtimeClass(tag.tpe.typeSymbol.asClass)
    runtimeClass.asInstanceOf[Class[E]]
  }

  private def staticClass[E: TypeTag]: ClassSymbol = synchronized {
    val mirror = typeTag[E].mirror
    mirror.staticClass(runtimeClass[E].getCanonicalName)
  }
}
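The key step in Annotation.get is evaluating an annotation's tree with a ToolBox to recover a live annotation instance. The sketch below demonstrates just that untypecheck-and-eval step on a hand-built constructor tree (built with quasiquotes, standing in for a real annotation tree); it is illustrative only and does not use the morpheus API.

import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

object UntypecheckEvalDemo extends App {
  val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()

  // Annotation.get receives a tree shaped like `new Labels("A", "B")` from
  // the reflection API; untypecheck strips compiler attributes so the
  // toolbox can re-typecheck and evaluate it into an object.
  val tree = q"""new Some(42)"""
  val instance = tb.eval(tb.untypecheck(tree)).asInstanceOf[Some[Int]]
  println(instance.get) // 42
}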
Example 2
Source File: RefineMacroBenchmark.scala From refined with MIT License | 5 votes |
package eu.timepit.refined.benchmark

import org.openjdk.jmh.annotations._

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox

@BenchmarkMode(Array(Mode.AverageTime))
@State(Scope.Thread)
class RefineMacroBenchmark {

  private val toolBox = currentMirror.mkToolBox()

  private val autoRefineV_PosInt_tree =
    toolBox.parse("""
      import eu.timepit.refined.auto.autoRefineV
      import eu.timepit.refined.types.numeric.PosInt
      val x: PosInt = 1
    """)

  @Benchmark
  def autoRefineV_PosInt: Any =
    toolBox.eval(autoRefineV_PosInt_tree)
}
Example 3
Source File: InferMacroBenchmark.scala From refined with MIT License | 5 votes |
package eu.timepit.refined.benchmark

import org.openjdk.jmh.annotations._

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox

@BenchmarkMode(Array(Mode.AverageTime))
@State(Scope.Thread)
class InferMacroBenchmark {

  private val toolBox = currentMirror.mkToolBox()

  private val autoInfer_Greater_tree =
    toolBox.parse("""
      import eu.timepit.refined.W
      import eu.timepit.refined.api.Refined
      import eu.timepit.refined.auto.autoInfer
      import eu.timepit.refined.numeric.Greater
      val a: Int Refined Greater[W.`5`.T] = Refined.unsafeApply(10)
      val b: Int Refined Greater[W.`0`.T] = a
    """)

  @Benchmark
  def autoInfer_Greater: Any =
    toolBox.eval(autoInfer_Greater_tree)
}
Example 4
Source File: eval.scala From refined with MIT License | 5 votes |
package eu.timepit.refined

import eu.timepit.refined.api.Validate

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox
import shapeless.Witness

object eval {

  final case class Eval[S](s: S)

  object Eval {

    // Cache ToolBox for Eval Validate instances
    private lazy val toolBox = currentMirror.mkToolBox()

    implicit def evalValidate[T, S <: String](
        implicit mt: Manifest[T],
        ws: Witness.Aux[S]
    ): Validate.Plain[T, Eval[S]] = {
      // The ascription (T => Boolean) allows to omit the parameter
      // type in ws.value (i.e. "x => ..." instead of "(x: T) => ...").
      val tree = toolBox.parse(s"(${ws.value}): ($mt => Boolean)")
      val predicate = toolBox.eval(tree).asInstanceOf[T => Boolean]
      Validate.fromPredicate(predicate, _ => ws.value, Eval(ws.value))
    }
  }
}
Example 5
Source File: S2Directive.scala From incubator-s2graph with Apache License 2.0 | 5 votes |
package org.apache.s2graph.graphql.types

import sangria.schema.{Argument, Directive, DirectiveLocation, StringType}

object S2Directive {

  object Eval {
    import scala.collection._

    val codeMap = mutable.Map.empty[String, () => Any]

    def compileCode(code: String): () => Any = {
      import scala.tools.reflect.ToolBox
      val toolbox = reflect.runtime.currentMirror.mkToolBox()
      toolbox.compile(toolbox.parse(code))
    }

    def getCompiledCode[T](code: String): T = {
      val compiledCode = Eval.codeMap.getOrElseUpdate(code, Eval.compileCode(code))
      compiledCode
        .asInstanceOf[() => Any]
        .apply()
        .asInstanceOf[T]
    }
  }

  type TransformFunc = String => String

  val funcArg = Argument("func", StringType)

  val Transform = Directive(
    "transform",
    arguments = List(funcArg),
    locations = Set(DirectiveLocation.Field),
    shouldInclude = _ => true
  )

  def resolveTransform(code: String, input: String): String = {
    try {
      val fn = Eval.getCompiledCode[TransformFunc](code)
      fn.apply(input)
    } catch {
      case e: Exception => e.toString
    }
  }
}
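The pattern above, toolbox.compile cached in a map keyed by source code, avoids recompiling the same snippet on every request. A minimal standalone sketch of that compile-once/call-many pattern (illustrative, not s2graph code):

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox

object CompileOnceDemo extends App {
  private val toolbox = currentMirror.mkToolBox()

  // compile() pays the compilation cost once and returns a () => Any thunk,
  // which is what S2Directive.Eval caches in its codeMap.
  val thunk: () => Any = toolbox.compile(toolbox.parse("(s: String) => s.toUpperCase"))

  // Invoking the thunk materialises the compiled value; no recompilation here.
  val fn = thunk().asInstanceOf[String => String]
  println(fn("hello"))   // HELLO
  println(fn("graphql")) // GRAPHQL
}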
Example 6
Source File: TestFlinkGenerator.scala From milan with Apache License 2.0 | 5 votes |
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.application.ApplicationConfiguration
import com.amazon.milan.application.sources.S3DataSource
import com.amazon.milan.dataformats.JsonDataInputFormat
import com.amazon.milan.compiler.flink.testing.{IntRecord, TestApplicationExecutor}
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import org.junit.Assert._
import org.junit.Test

import scala.reflect.runtime.universe
import scala.tools.reflect.ToolBox

@Test
class TestFlinkGenerator {

  private val generator = new FlinkGenerator(GeneratorConfig())

  @Test
  def test_FlinkGenerator_GenerateScala_WithListSourceAndMapOfOneRecord_GeneratesCodeThatCompilesAndOutputsMappedRecord(): Unit = {
    val input = Stream.of[IntRecord].withName("input")
    val output = input.map(r => IntRecord(r.i + 1)).withName("output")
    val graph = new StreamGraph(output)

    val config = new ApplicationConfiguration
    config.setListSource(input, IntRecord(1))

    val result = TestApplicationExecutor.executeApplication(graph, config, 10, output)
    val outputRecords = result.getRecords(output)
    assertEquals(List(IntRecord(2)), outputRecords)
  }

  @Test
  def test_FlinkGenerator_GenerateScala_WithS3DataSource_GeneratesCodeThatCompiles(): Unit = {
    val input = Stream.of[IntRecord].withName("input")
    val output = input.map(r => IntRecord(r.i + 1)).withName("output")
    val graph = new StreamGraph(output)

    val config = new ApplicationConfiguration
    config.setSource(input, new S3DataSource[IntRecord]("bucket", "prefix", new JsonDataInputFormat[IntRecord]()))

    val generatedCode = this.generator.generateScala(graph, config, "", "TestApp")

    this.eval(generatedCode)
  }

  private def eval(code: String): Any = {
    try {
      val tb = ToolBox(universe.runtimeMirror(this.getClass.getClassLoader)).mkToolBox()
      val tree = tb.parse(code)
      tb.eval(tree)
    } catch {
      case ex: Throwable =>
        Console.println(code)
        throw ex
    }
  }
}
Example 7
Source File: Traverser.scala From scaldy with Apache License 2.0 | 5 votes |
package com.paytrue.scaldy

import java.nio.charset.StandardCharsets
import java.nio.file.Path

import scala.io.Source
import scala.reflect.runtime.currentMirror
import scala.reflect.runtime.universe.Flag._
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

class ClassDefTraverser(file: Path) extends Traverser {
  var classes: List[BeanClass] = List.empty

  override def traverse(tree: Tree) = {
    tree match {
      case ClassDef(mods, name, _, impl) ⇒
        val valTraverser = new ValDefTraverser
        valTraverser.traverse(tree)
        val parents = impl.parents.map(_.toString())
        classes = classes :+ BeanClass(name.toString, valTraverser.properties, parents,
          isAbstract = mods.hasFlag(ABSTRACT), isTrait = mods.hasFlag(TRAIT), sourceFile = file)
      case _ ⇒
    }
    super.traverse(tree)
  }
}

class ValDefTraverser extends Traverser {
  var properties: List[Property] = List.empty

  override def traverse(tree: Tree) = {
    tree match {
      case ValDef(Modifiers(_, _, annotations), valName, tpt, _) if hasBeanProperty(annotations) ⇒
        val isRequired = hasRequired(annotations)
        tpt match {
          case AppliedTypeTree(Select(qualifier, typeName), args) ⇒
            val typeTraverser = new TypeArgsTraverser
            typeTraverser.traverseTrees(args)
            properties :+= Property(valName.toString, tpt.toString(), typeTraverser.refTypes, isRequired)
          case _ ⇒
            properties :+= Property(valName.toString, tpt.toString(), Set.empty, isRequired)
        }
      case _ ⇒
    }
    super.traverse(tree)
  }

  private def hasBeanProperty(annotations: List[Tree]) = annotations.exists {
    case Apply(Select(New(Ident(TypeName("BeanProperty"))), _), _) ⇒ true
    case _ ⇒ false
  }

  private def hasRequired(annotations: List[Tree]) = annotations.exists {
    case Apply(Select(New(Annotated(_, Ident(TypeName("Required")))), _), _) ⇒ true
    case _ ⇒ false
  }
}

class TypeArgsTraverser extends Traverser {
  var refTypes: Set[String] = Set.empty

  override def traverse(tree: Tree) = {
    tree match {
      case ident @ Ident(identName) if ident.isType ⇒
        refTypes += identName.toString
      case _ ⇒
    }
    super.traverse(tree)
  }
}

object FileClassFinder {
  def getClassesFromFile(file: Path): List[BeanClass] = {
    val toolbox = currentMirror.mkToolBox()
    val fileContents = Source.fromFile(file.toString, StandardCharsets.UTF_8.name()).getLines().drop(1).mkString("\n")
    val tree = toolbox.parse(fileContents)
    val traverser = new ClassDefTraverser(file)
    traverser.traverse(tree)
    traverser.classes
  }
}
Example 8
Source File: Analyser.scala From Raphtory with Apache License 2.0 | 5 votes |
package com.raphtory.core.analysis.API

import akka.actor.ActorContext
import com.raphtory.core.analysis.API.GraphLenses.GraphLens

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.io.Source
import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox

case class ManagerCount(count: Int)
case class WorkerID(ID: Int)

class BlankAnalyser(args: Array[String]) extends Analyser(args) {
  override def analyse(): Unit = {}
  override def setup(): Unit = {}
  override def returnResults(): Any = {}
  override def defineMaxSteps(): Int = 1
  override def processResults(results: ArrayBuffer[Any], timeStamp: Long, viewCompleteTime: Long): Unit = { println("howdy!") }
}

case class LoadExternalAnalyser(rawFile: String, args: Array[String]) {
  private val toolbox = currentMirror.mkToolBox()
  private val tree = toolbox.parse(rawFile)
  private val compiledCode = toolbox.compile(tree).apply().asInstanceOf[Class[Analyser]]

  def newAnalyser = compiledCode.getConstructor(classOf[Array[String]]).newInstance(args).asInstanceOf[Analyser]
}

abstract class Analyser(args: Array[String]) extends java.io.Serializable {
  implicit var context: ActorContext = null
  implicit var managerCount: ManagerCount = null
  implicit var proxy: GraphLens = null
  var workerID: Int = 0
  private var toPublish: mutable.ArrayBuffer[String] = ArrayBuffer()

  final def sysSetup(context: ActorContext, managerCount: ManagerCount, proxy: GraphLens, ID: Int) = {
    this.context = context
    this.managerCount = managerCount
    this.proxy = proxy
    this.workerID = ID
  }

  def publishData(data: String) = toPublish += data
  def getPublishedData() = toPublish.toArray
  def clearPublishedData() = toPublish = ArrayBuffer()

  def analyse(): Unit
  def setup(): Unit
  def returnResults(): Any
  def defineMaxSteps(): Int
  def processResults(results: ArrayBuffer[Any], timeStamp: Long, viewCompleteTime: Long): Unit

  def processViewResults(results: ArrayBuffer[Any], timestamp: Long, viewCompleteTime: Long): Unit =
    processResults(results, timestamp: Long, viewCompleteTime: Long)

  def processWindowResults(results: ArrayBuffer[Any], timestamp: Long, windowSize: Long, viewCompleteTime: Long): Unit =
    processResults(results, timestamp: Long, viewCompleteTime: Long)

  def processBatchWindowResults(
      results: ArrayBuffer[Any],
      timestamp: Long,
      windowSet: Array[Long],
      viewCompleteTime: Long
  ): Unit = processResults(results, timestamp: Long, viewCompleteTime: Long)
}
Example 9
Source File: JsonLifter.scala From diffy with GNU Affero General Public License v3.0 | 5 votes |
package ai.diffy.lifter

import com.fasterxml.jackson.core.{JsonGenerator, JsonToken}
import com.fasterxml.jackson.databind.annotation.JsonSerialize
import com.fasterxml.jackson.databind.ser.std.StdSerializer
import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper, SerializerProvider}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.twitter.util.Try

import scala.collection.JavaConversions._
import scala.language.postfixOps
import scala.reflect.runtime.universe.runtimeMirror
import scala.tools.reflect.ToolBox
import scala.util.control.NoStackTrace

object JsonLifter {
  @JsonSerialize(using = classOf[JsonNullSerializer])
  object JsonNull

  object JsonParseError extends Exception with NoStackTrace

  val toolbox = runtimeMirror(getClass.getClassLoader).mkToolBox()

  val Mapper = new ObjectMapper with ScalaObjectMapper
  Mapper.registerModule(DefaultScalaModule)

  def apply(obj: Any): JsonNode = Mapper.valueToTree(obj)

  def lift(node: JsonNode): Any = node.asToken match {
    case JsonToken.START_ARRAY =>
      node.elements.toSeq.map { element => lift(element) }
    case JsonToken.START_OBJECT => {
      val fields = node.fieldNames.toSet
      if (fields.exists { field => Try(toolbox.parse(s"object ${field}123")).isThrow }) {
        node.fields map { field => (field.getKey -> lift(field.getValue)) } toMap
      } else {
        FieldMap(
          node.fields map { field => (field.getKey -> lift(field.getValue)) } toMap
        )
      }
    }
    case JsonToken.VALUE_FALSE        => false
    case JsonToken.VALUE_NULL         => JsonNull
    case JsonToken.VALUE_NUMBER_FLOAT => node.asDouble
    case JsonToken.VALUE_NUMBER_INT   => node.asLong
    case JsonToken.VALUE_TRUE         => true
    case JsonToken.VALUE_STRING       => node.textValue
    case _                            => throw JsonParseError
  }

  def decode(json: String): JsonNode = Mapper.readTree(json)
  def encode(item: Any): String = Mapper.writer.writeValueAsString(item)
}

class JsonNullSerializer(clazz: Class[Any]) extends StdSerializer[Any](clazz) {
  def this() {
    this(null)
  }

  override def serialize(t: Any, jsonGenerator: JsonGenerator, serializerProvider: SerializerProvider): Unit = {
    jsonGenerator.writeNull()
  }
}
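JsonLifter uses toolbox.parse purely as a validity check: the fields are only lifted into a FieldMap if every field name could appear in a generated object definition. A standalone sketch of that check, using scala.util.Try instead of Twitter's Try (illustrative only):

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox
import scala.util.Try

object IdentifierCheckDemo extends App {
  private val toolbox = currentMirror.mkToolBox()

  // If "object <field>123" does not parse, the field name is not a plain
  // Scala identifier and JsonLifter falls back to an ordinary Map.
  def isPlainIdentifier(field: String): Boolean =
    Try(toolbox.parse(s"object ${field}123")).isSuccess

  println(isPlainIdentifier("userName"))  // true
  println(isPlainIdentifier("user-name")) // false: '-' breaks the parse
}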
Example 10
Source File: ASTMatchers.scala From Argus with MIT License | 5 votes |
package argus.macros

import org.scalactic.Equality
import org.scalatest.matchers.{MatchResult, Matcher}

import scala.tools.reflect.ToolBox

trait ASTMatchers {

  val runtimeUniverse = scala.reflect.runtime.universe
  import runtimeUniverse._
  import scala.reflect.runtime.currentMirror

  val toolbox = currentMirror.mkToolBox()

  // For testing equality between trees in tests
  implicit val treeEq = new Equality[Tree] {
    def areEqual(a: Tree, b: Any): Boolean = b match {
      // equalsStructure bug: https://github.com/scalamacros/paradise/issues/80
      case c: Tree => showRaw(a) == showRaw(c) //.equalsStructure(c)
      case _ => false
    }
  }

  implicit val valDefEq = new Equality[ValDef] {
    def areEqual(a: ValDef, b: Any): Boolean = b match {
      case c: ValDef => showRaw(a) == showRaw(c)
      case _ => false
    }
  }

  implicit val listTreeEq = new Equality[List[Tree]] {
    def areEqual(a: List[Tree], b: Any): Boolean = b match {
      case c: List[_] => a.size == c.size && a.zip(c).forall { case (x, y) => treeEq.areEqual(x, y) }
      case _ => false
    }
  }

  val extractCodecNameAndType: PartialFunction[Tree, (String, String)] = {
    case q"implicit val $name: $typ = $_" => (name.toString, typ.toString)
  }
}
Example 11
Source File: Eval.scala From lift with MIT License | 5 votes |
package opencl.executor

import ir.ast.Lambda
import lift.arithmetic._

import scala.reflect.runtime._
import scala.tools.reflect.ToolBox

object Eval {

  def apply(code: String): Lambda = {
    eval(code).asInstanceOf[Lambda]
  }

  def getMethod(code: String): Seq[ArithExpr] => Lambda = {
    eval(code).asInstanceOf[Seq[ArithExpr] => Lambda]
  }

  def eval(code: String): Any = {
    val mirror = universe.runtimeMirror(getClass.getClassLoader)
    val tb = mirror.mkToolBox()
    val tree = tb.parse(s"""
      |import arithmetic._
      |import lift.arithmetic._
      |import lift.arithmetic.simplifier._
      |import ir._
      |import ir.ast._
      |import opencl.ir._
      |import opencl.ir.pattern._
      |import opencl.ir.ast._
      |$code
      """.stripMargin)
    tb.eval(tree)
  }
}
Example 12
Source File: util.scala From lacasa with BSD 3-Clause "New" or "Revised" License | 5 votes |
package lacasa

import reflect._
import tools.reflect.{ToolBox, ToolBoxError}

object util {

  implicit class objectops(obj: Any) {
    def mustBe(other: Any) = assert(obj == other, obj + " is not " + other)
    def mustEqual(other: Any) = mustBe(other)
  }

  implicit class stringops(text: String) {
    def mustContain(substring: String) = assert(text contains substring, text)
  }

  def intercept[T <: Throwable : ClassTag](body: => Any): T =
    try {
      body
      throw new Exception(s"Exception of type ${classTag[T]} was not thrown")
    } catch {
      case t: Throwable =>
        Box.uncheckedCatchControl
        if (classTag[T].runtimeClass != t.getClass) throw t
        else t.asInstanceOf[T]
    }

  def eval(code: String, compileOptions: String = ""): Any = {
    val tb = mkToolbox(compileOptions)
    tb.eval(tb.parse(code))
  }

  def mkToolbox(compileOptions: String = ""): ToolBox[_ <: scala.reflect.api.Universe] = {
    val m = scala.reflect.runtime.currentMirror
    import scala.tools.reflect.ToolBox
    m.mkToolBox(options = compileOptions)
  }

  def scalaBinaryVersion: String = {
    val Pattern = """(\d+\.\d+)\..*""".r
    scala.util.Properties.versionNumberString match {
      case Pattern(v) => v
      case _          => ""
    }
  }

  def toolboxClasspath: String = {
    val f = new java.io.File(s"core/target/scala-${scalaBinaryVersion}/classes")
    if (!f.exists) sys.error(s"output directory ${f.getAbsolutePath} does not exist.")
    f.getAbsolutePath
  }

  def sporesClasspath: String = {
    val f = new java.io.File(s"lib/spores-core_2.11.jar")
    if (!f.exists) sys.error(s"jar file ${f.getAbsolutePath} does not exist.")
    f.getAbsolutePath
  }

  def pluginPath: String = {
    val path = java.lang.System.getProperty("lacasa.plugin.jar")
    val f = new java.io.File(path)
    val absPath = f.getAbsolutePath
    println(s"LaCasa plugin path: $absPath")
    absPath
  }

  def expectError(errorSnippet: String, compileOptions: String = "",
                  baseCompileOptions: String = s"-cp ${toolboxClasspath}:${sporesClasspath} -Xplugin:${pluginPath} -P:lacasa:enable")(code: String) {
    intercept[ToolBoxError] {
      eval(code, compileOptions + " " + baseCompileOptions)
    }.getMessage mustContain errorSnippet
  }
}
Example 13
Source File: CompileUtil.scala From tapir with Apache License 2.0 | 5 votes |
package sttp.tapir.util

import org.scalatest.Matchers._

import scala.tools.reflect.{ToolBox, ToolBoxError}

object CompileUtil {

  def interceptEval(code: String): ToolBoxError = {
    intercept[ToolBoxError](eval(code))
  }

  def eval(code: String): Any = {
    val tb = mkToolbox()
    tb.eval(tb.parse(code))
  }

  def mkToolbox(compileOptions: String = ""): ToolBox[_ <: scala.reflect.api.Universe] = {
    val m = scala.reflect.runtime.currentMirror
    m.mkToolBox(options = compileOptions)
  }
}
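A hypothetical usage of the helper above (not test code from the tapir repository): interceptEval turns "this code must not compile" checks into ordinary assertions on the captured ToolBoxError.

import sttp.tapir.util.CompileUtil

object CompileUtilUsage extends App {
  // Well-typed code simply evaluates.
  println(CompileUtil.eval("1 + 1")) // 2

  // Ill-typed code makes the ToolBox throw; interceptEval returns the error
  // so its message can be inspected.
  val error = CompileUtil.interceptEval("""val x: Int = "not an int"""")
  println(error.getMessage.contains("type mismatch")) // expected: true
}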
Example 14
Source File: SparkNarrowTest.scala From spark-tools with Apache License 2.0 | 3 votes |
package io.univalence

import java.net.URLClassLoader
import java.sql.Date

import io.univalence.centrifuge.Sparknarrow
import org.apache.spark.SparkConf
import org.apache.spark.sql.types._
import org.apache.spark.sql.Encoders
import org.apache.spark.sql.SparkSession
import org.scalatest.FunSuite

case class Person(name: String, age: Int, date: Date)

class SparknarrowTest extends FunSuite {

  val conf: SparkConf = new SparkConf()
  conf.setAppName("yo")
  conf.set("spark.sql.caseSensitive", "true")
  conf.setMaster("local[2]")

  implicit val ss: SparkSession = SparkSession.builder.config(conf).getOrCreate
  import ss.implicits._

  test("testBasicCC") {
    val classDef = Sparknarrow.basicCC(Encoders.product[Person].schema).classDef
    checkDefinition(classDef)
  }

  def checkDefinition(scalaCode: String): Unit = {
    //TODO do a version for 2.11 and 2.12
  }

  test("play with scala eval") {
    val code =
      """
      case class Tata(str: String)
      case class Toto(age: Int, tata: Tata)
      """
    checkDefinition(code)
    checkDefinition(code)
  }

  ignore("printSchema StructType") {
    val yo = StructType(
      Seq(
        StructField("name", StringType),
        StructField("tel", ArrayType(StringType))
      )
    )
    yo.printTreeString()
  }
}
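checkDefinition above is left as a TODO. One way to fill it in with a ToolBox is to parse and compile the generated definitions and fail if the toolbox reports an error; the helper below is a sketch under that assumption, not the project's actual implementation.

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.{ToolBox, ToolBoxError}

// Hypothetical helper: the generated case-class definitions are considered
// valid if the ToolBox can compile them.
object DefinitionChecker {
  private val tb = currentMirror.mkToolBox()

  def checkDefinition(scalaCode: String): Unit =
    try {
      tb.compile(tb.parse(scalaCode))
      ()
    } catch {
      case e: ToolBoxError =>
        throw new AssertionError(s"generated code does not compile: ${e.getMessage}", e)
    }
}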