scala.collection.Seq Scala Examples
The following examples show how to use scala.collection.Seq.
The original project, source file, and license are noted above each example.
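Before the project examples, here is a minimal, self-contained sketch (the object and method names are illustrative, not taken from any project below) of why code imports scala.collection.Seq explicitly: it is the common supertype of both immutable and mutable sequences, so an API declared against it accepts either kind. Note that in Scala 2.12 and earlier the default Seq alias points to scala.collection.Seq, while since Scala 2.13 it points to the immutable variant, which is why some of the projects below import it explicitly.

import scala.collection.Seq
import scala.collection.mutable.ArrayBuffer

object SeqExample {
  // scala.collection.Seq is the supertype of immutable and mutable sequences,
  // so this method accepts List, Vector, ArrayBuffer, etc.
  def describe(xs: Seq[Int]): String =
    s"${xs.length} element(s), first = ${xs.headOption}"

  def main(args: Array[String]): Unit = {
    println(describe(List(1, 2, 3)))      // an immutable Seq
    println(describe(ArrayBuffer(4, 5)))  // a mutable Seq
  }
}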
Example 1
Source File: ScExtendsBlockElementType.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.stubs.elements

import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs.{IndexSink, StubElement, StubInputStream, StubOutputStream}
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScParameterizedTypeElement, ScParenthesisedTypeElement, ScReferenceableInfixTypeElement, ScSimpleTypeElement, ScTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScExtendsBlock
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.templates.ScExtendsBlockImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.ScExtendsBlockStub
import org.jetbrains.plugins.scala.lang.psi.stubs.elements.ScExtendsBlockElementType.directSupersNames
import org.jetbrains.plugins.scala.lang.psi.stubs.impl.ScExtendsBlockStubImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.index.ScalaIndexKeys.SUPER_CLASS_NAME_KEY

import scala.annotation.tailrec
import scala.collection.Seq

class ScExtendsBlockElementType extends ScStubElementType[ScExtendsBlockStub, ScExtendsBlock]("extends block") {

  override def serialize(stub: ScExtendsBlockStub, dataStream: StubOutputStream): Unit = {
    dataStream.writeNames(stub.baseClasses)
  }

  override def deserialize(dataStream: StubInputStream, parentStub: StubElement[_ <: PsiElement]): ScExtendsBlockStub = {
    new ScExtendsBlockStubImpl(parentStub, this, baseClassesRefs = dataStream.readNames)
  }

  override def createStubImpl(block: ScExtendsBlock, parentStub: StubElement[_ <: PsiElement]): ScExtendsBlockStub =
    new ScExtendsBlockStubImpl(parentStub, this, baseClassesRefs = directSupersNames(block).toArray.asReferences)

  override def indexStub(stub: ScExtendsBlockStub, sink: IndexSink): Unit =
    this.indexStub(stub.baseClasses, sink, SUPER_CLASS_NAME_KEY)

  override def createElement(node: ASTNode): ScExtendsBlock = new ScExtendsBlockImpl(node)

  override def createPsi(stub: ScExtendsBlockStub): ScExtendsBlock = new ScExtendsBlockImpl(stub)
}

private object ScExtendsBlockElementType {

  def directSupersNames(extBlock: ScExtendsBlock): Seq[String] = {

    @tailrec
    def refName(te: ScTypeElement): Option[String] = {
      te match {
        case simpleType: ScSimpleTypeElement => simpleType.reference.map(_.refName)
        case infixType: ScReferenceableInfixTypeElement => Option(infixType.reference).map(_.refName)
        case x: ScParameterizedTypeElement => refName(x.typeElement)
        case x: ScParenthesisedTypeElement =>
          x.typeElement match {
            case Some(e) => refName(e)
            case _ => None
          }
        case _ => None
      }
    }

    def default: Seq[String] =
      if (extBlock.isUnderCaseClass) caseClassDefaults
      else defaultParents

    extBlock.templateParents match {
      case None => Seq.empty
      case Some(parents) =>
        val parentElements: Seq[ScTypeElement] = parents.typeElements
        parentElements.flatMap(refName) ++ default
    }
  }

  private val defaultParents = "Object" :: "ScalaObject" :: Nil
  private val caseClassDefaults = defaultParents ::: "Product" :: "Serializable" :: Nil
}
Example 2
Source File: DataGens.scala From spark-vector with Apache License 2.0
package com.actian.spark_vector

import java.math.BigDecimal
import java.{ sql => jsql }
import java.util.Calendar

import scala.collection.Seq
import scala.util.Try

import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.scalacheck.Gen

import com.actian.spark_vector.colbuffer.util.MillisecondsInDay
import java.math.RoundingMode

object DataGens {
  import com.actian.spark_vector.DataTypeGens._
  import org.scalacheck.Arbitrary._
  import org.scalacheck.Gen._

  import scala.collection.JavaConverters._

  val DefaultMaxRows = 500

  val booleanGen: Gen[Boolean] = arbitrary[Boolean]

  val byteGen: Gen[Byte] = arbitrary[Byte]

  val shortGen: Gen[Short] = arbitrary[Short]

  val intGen: Gen[Int] = arbitrary[Int]

  val longGen: Gen[Long] = arbitrary[Long]

  // FIXME allow arbitrary doubles (and filter externally for vector tests)
  val floatGen: Gen[Float] = arbitrary[Float].map(f => if (f.abs > 1e-38) f else 0.0f)

  // FIXME allow arbitrary doubles (and filter externally for vector tests)
  val doubleGen: Gen[Double] = for {
    neg <- arbitrary[Boolean]
    digits <- listOfN(12, choose(0, 9))
  } yield s"${if (neg) "-" else ""}1.${digits.mkString("")}".toDouble

  val decimalGen: Gen[BigDecimal] = arbitrary[scala.BigDecimal]
    .retryUntil(bd => bd.scale <= 12 && bd.scale >= 0 && bd.precision <= 26 && Try { new BigDecimal(bd.toString) }.isSuccess)
    .map(bd => new BigDecimal(bd.toString))

  private val dateValueGen: Gen[Long] =
    choose(-3600L * 1000 * 24 * 100000L, 3600L * 1000 * 24 * 100000L)

  // @note normalize getTime so that we don't have diffs more than 1 day in between our {JDBC,Spark}results
  val dateGen: Gen[jsql.Date] = dateValueGen.map(d => new jsql.Date(d / MillisecondsInDay * MillisecondsInDay))

  val timestampGen: Gen[jsql.Timestamp] = for (ms <- dateValueGen) yield new jsql.Timestamp(ms)

  // FIXME allow empty strings (and filter externally for vector tests)
  // @note we do not allow invalid UTF8 chars to be generated (from D800 to DFFF incl)
  val stringGen: Gen[String] =
    listOfN(choose(1, 512).sample.getOrElse(1), arbitrary[Char]).map(_.mkString).map(
      s => s.filter(c => Character.isDefined(c) && c != '\u0000' && (c < '\uD800' || c > '\uDFFF'))
    )

  def valueGen(dataType: DataType): Gen[Any] = dataType match {
    case BooleanType => booleanGen
    case ByteType => byteGen
    case ShortType => shortGen
    case IntegerType => intGen
    case LongType => longGen
    case FloatType => floatGen
    case DoubleType => doubleGen
    case TimestampType => timestampGen
    case DateType => dateGen
    case StringType => stringGen
    case _: DecimalType => decimalGen
    case _ => throw new Exception("Invalid data type.")
  }

  def nullableValueGen(field: StructField): Gen[Any] = {
    val gen = valueGen(field.dataType)
    if (field.nullable) frequency(1 -> gen, 10 -> const(null)) else gen
  }

  def rowGen(schema: StructType): Gen[Row] =
    sequence(schema.fields.map(f => nullableValueGen(f))).map(l => Row.fromSeq(l.asScala)) // TODO Huh? Why ju.ArrayList?!?

  def dataGenFor(schema: StructType, maxRows: Int): Gen[Seq[Row]] = for {
    numRows <- choose(1, maxRows)
    rows <- listOfN(numRows, rowGen(schema))
  } yield rows

  case class TypedData(dataType: StructType, data: Seq[Row])

  val dataGen: Gen[TypedData] = for {
    schema <- schemaGen
    data <- dataGenFor(schema, DefaultMaxRows)
  } yield TypedData(schema, data)

  val allDataGen: Gen[TypedData] = for {
    schema <- allTypesSchemaGen
    data <- dataGenFor(schema, DefaultMaxRows)
  } yield TypedData(schema, data)
}
Example 3
Source File: store.scala From playground-binding.scala with MIT License
package com.ccm.me.playground.bindingscala.treeview

import scala.collection.Seq
import scala.concurrent.{Future, Promise}
import scala.scalajs.js.timers
import scala.util.Random

// minimal business model implementation
object FileStore {
  // data for the store (compact representation)
  val data = List(
    "1", "0", "ontologies", 'd,
    "2", "1", "agency", 'd,
    "3", "2", "agency-ontology.ttl", 'f,
    "4", "2", "agency-ontology.html", 'f,
    "6", "2", "agency.png", 'f,
    "7", "2", "agency.graffle", 'f,
    "8", "1", "examples", 'd,
    "9", "8", "xml example.xml", 'f,
    "10", "8", "gw-mp-record.ttl", 'f,
    "11", "1", "assets", 'd,
    "12", "11", "css.css", 'f,
    "13", "1", "place", 'd,
    "14", "13", "place-ontology.html", 'f,
    "15", "13", "place.graffle", 'f,
    "17", "13", "place.png", 'f,
    "18", "13", "place-ontology.ttl", 'f,
    "19", "1", "concept", 'd,
    "20", "19", "concept-ontology.ttl", 'f,
    "21", "19", "concept.graffle", 'f,
    "22", "19", "concept-ontology.html", 'f,
    "23", "19", "concept.png", 'f,
    "24", "1", "house-membership", 'd,
    "25", "24", "house-membership-ontology.ttl", 'f,
    "27", "24", "house-membership.graffle", 'f,
    "28", "24", "house-membership-ontology.html", 'f,
    "29", "24", "house-membership.png", 'f,
    "30", "1", "time-period", 'd,
    "31", "30", "time-period-ontology.html", 'f,
    "32", "30", "time-period.graffle", 'f,
    "33", "30", "time-period-ontology.ttl", 'f,
    "34", "30", "time-period.png", 'f,
    "35", "1", "election", 'd,
    "37", "35", "election.graffle", 'f,
    "38", "35", "election.png", 'f,
    "39", "35", "election-ontology.ttl", 'f,
    "40", "35", "election-ontology.html", 'f,
    "41", "1", "README.md", 'f,
    "42", "1", "list-of-lists.csv", 'f,
    "44", "1", "specialised-agency", 'd,
    "45", "44", "specialised-agency.png", 'f,
    "46", "44", "specialised-agency.graffle", 'f,
    "48", "1", "petition", 'd,
    "49", "48", "petition-ontology.html", 'f,
    "50", "48", "petition.graffle", 'f,
    "51", "48", "petition.png", 'f,
    "52", "48", "petition-ontology.ttl", 'f,
    "53", "1", "core", 'd,
    "54", "53", "core-ontology.html", 'f,
    "55", "53", "core.png", 'f,
    "56", "53", "core-ontology.ttl", 'f,
    "57", "53", "core.graffle", 'f,
    "58", "1", "contact-point", 'd,
    "59", "58", "contact-point.png", 'f,
    "60", "58", "contact-point-ontology.html", 'f,
    "61", "58", "contact-point.graffle", 'f,
    "63", "58", "contact-point-ontology.ttl", 'f,
    "64", "1", "urls.csv", 'f,
    "65", "1", "index.html", 'f)

  abstract class FSElement(id: String, label: String)
  final case class FSFile(id: String, label: String) extends FSElement(id, label)
  final case class FSFolder(id: String, label: String) extends FSElement(id, label)

  private val rnd = new Random()

  def childrenOf(id: String): Future[Seq[FSElement]] = {
    // not optimal but it does the job.
    val children = data.grouped(4)
      .filter( e ⇒ e(1) == id )
      .map {
        case e@List( id, pid, name, 'd ) ⇒ FSFolder(id.toString, name.toString)
        case e@List( id, pid, name, 'f ) ⇒ FSFile(id.toString, name.toString)
      }
      .toList

    // simulate asynchronous load...
    val p = Promise[Seq[FSElement]]()
    timers.setTimeout(rnd.nextInt(2000) + 150) {
      p.success(children)
    }

    p.future
  }
}
Example 4
Source File: ClockSkewFromParentTransformer.scala From haystack-traces with Apache License 2.0
package com.expedia.www.haystack.trace.reader.readers.transformers

import com.expedia.open.tracing.Span
import com.expedia.www.haystack.trace.commons.utils.SpanUtils
import com.expedia.www.haystack.trace.reader.readers.utils.{MutableSpanForest, SpanTree}

import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
import scala.collection.{Seq, mutable}

class ClockSkewFromParentTransformer extends SpanTreeTransformer {

  case class SpanTreeWithParent(spanTree: SpanTree, parent: Option[Span])

  override def transform(forest: MutableSpanForest): MutableSpanForest = {
    val underlyingSpans = new mutable.ListBuffer[Span]
    forest.getAllTrees.foreach(tree => {
      adjustSkew(underlyingSpans, List(SpanTreeWithParent(tree, None)))
    })
    forest.updateUnderlyingSpans(underlyingSpans)
  }

  @tailrec
  private def adjustSkew(fixedSpans: ListBuffer[Span], spanTrees: Seq[SpanTreeWithParent]): Unit = {
    if (spanTrees.isEmpty) return

    // collect the child trees that need to be corrected for clock skew
    val childTrees = mutable.ListBuffer[SpanTreeWithParent]()

    spanTrees.foreach(e => {
      val rootSpan = e.spanTree.span
      var adjustedSpan = rootSpan
      e.parent match {
        case Some(parentSpan) =>
          adjustedSpan = adjustSpan(rootSpan, parentSpan)
          fixedSpans += adjustedSpan
        case _ => fixedSpans += rootSpan
      }
      childTrees ++= e.spanTree.children.map(tree => SpanTreeWithParent(tree, Some(adjustedSpan)))
    })

    adjustSkew(fixedSpans, childTrees)
  }

  private def adjustSpan(child: Span, parent: Span): Span = {
    var shift = 0L
    if (child.getStartTime < parent.getStartTime) {
      shift = parent.getStartTime - child.getStartTime
    }
    val childEndTime = SpanUtils.getEndTime(child)
    val parentEndTime = SpanUtils.getEndTime(parent)
    if (parentEndTime < childEndTime + shift) {
      shift = parentEndTime - childEndTime
    }

    if (shift == 0L) {
      child
    } else {
      Span.newBuilder(child).setStartTime(child.getStartTime + shift).build()
    }
  }
}
Example 5
Source File: TestHelper.scala From odsc-west-streaming-trends with GNU General Public License v3.0
package com.twilio.open.streaming.trend.discovery

import java.io.{ByteArrayInputStream, InputStream}
import java.nio.charset.StandardCharsets

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.google.protobuf.Message
import com.googlecode.protobuf.format.JsonFormat
import com.holdenkarau.spark.testing.{LocalSparkContext, SparkContextProvider}
import com.twilio.open.protocol.Calls.CallEvent
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers, Suite}
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.Seq
import scala.io.Source
import scala.reflect.ClassTag
import scala.reflect.classTag

object TestHelper {
  val log: Logger = LoggerFactory.getLogger("com.twilio.open.streaming.trend.discovery.TestHelper")
  val mapper: ObjectMapper = {
    val m = new ObjectMapper()
    m.registerModule(DefaultScalaModule)
  }

  val jsonFormat: JsonFormat = new JsonFormat

  def loadScenario[T <: Message : ClassTag](file: String): Seq[T] = {
    val fileString = Source.fromFile(file).mkString
    val parsed = mapper.readValue(fileString, classOf[Sceanario])
    parsed.input.map { data =>
      val json = mapper.writeValueAsString(data)
      convert[T](json)
    }
  }

  def convert[T <: Message : ClassTag](json: String): T = {
    val clazz = classTag[T].runtimeClass
    val builder = clazz.getMethod("newBuilder").invoke(clazz).asInstanceOf[Message.Builder]
    try {
      val input: InputStream = new ByteArrayInputStream(json.getBytes())
      jsonFormat.merge(input, builder)
      builder.build().asInstanceOf[T]
    } catch {
      case e: Exception => throw e
    }
  }

  def asMockKafkaDataFrame(event: CallEvent): MockKafkaDataFrame = {
    val key = event.getEventId.getBytes(StandardCharsets.UTF_8)
    val value = event.toByteArray
    MockKafkaDataFrame(key, value)
  }
}

case class MockKafkaDataFrame(key: Array[Byte], value: Array[Byte])

@SerialVersionUID(1L)
case class KafkaDataFrame(key: Array[Byte], topic: Array[Byte], value: Array[Byte]) extends Serializable

case class Sceanario(input: Seq[Any], expected: Option[Any] = None)

trait SparkSqlTest extends BeforeAndAfterAll with SparkContextProvider {
  self: Suite =>

  @transient var _sparkSql: SparkSession = _
  @transient private var _sc: SparkContext = _

  override def sc: SparkContext = _sc

  def conf: SparkConf

  def sparkSql: SparkSession = _sparkSql

  override def beforeAll() {
    _sparkSql = SparkSession.builder().config(conf).getOrCreate()
    _sc = _sparkSql.sparkContext
    setup(_sc)
    super.beforeAll()
  }

  override def afterAll() {
    try {
      _sparkSql.close()
      _sparkSql = null
      LocalSparkContext.stop(_sc)
      _sc = null
    } finally {
      super.afterAll()
    }
  }
}
Example 6
Source File: EmbeddedKafkaCluster.scala From ksql-jdbc-driver with Apache License 2.0
package com.github.mmolimar.ksql.jdbc.embedded

import java.io.File
import java.util.Properties

import com.github.mmolimar.ksql.jdbc.utils.TestUtils
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.Logging
import kafka.zk.AdminZkClient

import scala.collection.Seq

class EmbeddedKafkaCluster(zkConnection: String,
                           ports: Seq[Int] = Seq(TestUtils.getAvailablePort),
                           baseProps: Properties = new Properties) extends Logging {

  private val actualPorts: Seq[Int] = ports.map(resolvePort)

  private var brokers: Seq[KafkaServer] = Seq.empty
  private var logDirs: Seq[File] = Seq.empty

  private lazy val zkClient = TestUtils.buildZkClient(zkConnection)
  private lazy val adminZkClient = new AdminZkClient(zkClient)

  def startup(): Unit = {
    info("Starting up embedded Kafka brokers")

    for ((port, i) <- actualPorts.zipWithIndex) {
      val logDir: File = TestUtils.makeTempDir("kafka-local")

      val properties: Properties = new Properties(baseProps)
      properties.setProperty(KafkaConfig.ZkConnectProp, zkConnection)
      properties.setProperty(KafkaConfig.ZkSyncTimeMsProp, i.toString)
      properties.setProperty(KafkaConfig.BrokerIdProp, (i + 1).toString)
      properties.setProperty(KafkaConfig.HostNameProp, "localhost")
      properties.setProperty(KafkaConfig.AdvertisedHostNameProp, "localhost")
      properties.setProperty(KafkaConfig.PortProp, port.toString)
      properties.setProperty(KafkaConfig.AdvertisedPortProp, port.toString)
      properties.setProperty(KafkaConfig.LogDirProp, logDir.getAbsolutePath)
      properties.setProperty(KafkaConfig.NumPartitionsProp, 1.toString)
      properties.setProperty(KafkaConfig.AutoCreateTopicsEnableProp, true.toString)
      properties.setProperty(KafkaConfig.DeleteTopicEnableProp, true.toString)
      properties.setProperty(KafkaConfig.LogFlushIntervalMessagesProp, 1.toString)
      properties.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, 1.toString)

      info(s"Local directory for broker ID ${i + 1} is ${logDir.getAbsolutePath}")

      brokers :+= startBroker(properties)
      logDirs :+= logDir
    }

    info(s"Started embedded Kafka brokers: $getBrokerList")
  }

  def shutdown(): Unit = {
    brokers.foreach(broker => TestUtils.swallow(broker.shutdown))
    logDirs.foreach(logDir => TestUtils.swallow(TestUtils.deleteFile(logDir)))
  }

  def getPorts: Seq[Int] = actualPorts

  def getBrokerList: String = actualPorts.map("localhost:" + _).mkString(",")

  def createTopic(topic: String, numPartitions: Int = 1, replicationFactor: Int = 1): Unit = {
    info(s"Creating topic $topic")
    adminZkClient.createTopic(topic, numPartitions, replicationFactor)
  }

  def deleteTopic(topic: String) {
    info(s"Deleting topic $topic")
    adminZkClient.deleteTopic(topic)
  }

  def deleteTopics(topics: Seq[String]): Unit = topics.foreach(deleteTopic)

  def existTopic(topic: String): Boolean = zkClient.topicExists(topic)

  def listTopics: Set[String] = zkClient.getAllTopicsInCluster

  private def resolvePort(port: Int) = if (port <= 0) TestUtils.getAvailablePort else port

  private def startBroker(props: Properties): KafkaServer = {
    val server = new KafkaServer(new KafkaConfig(props))
    server.startup
    server
  }

  override def toString: String = {
    val sb: StringBuilder = StringBuilder.newBuilder
    sb.append("Kafka{")
    sb.append("brokerList='").append(getBrokerList).append('\'')
    sb.append('}')

    sb.toString
  }
}
Example 7
Source File: package.scala From intellij-lsp with Apache License 2.0
package scala.meta

import com.intellij.openapi.project.DumbService
import com.intellij.psi.ResolveResult
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScPrimaryConstructor
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScAnnotation
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScAnnotationsHolder
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScTemplateDefinition, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.impl.base.ScStableCodeReferenceElementImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.elements.ScStubElementType
import org.jetbrains.plugins.scala.lang.resolve.processor.ResolveProcessor
import org.jetbrains.plugins.scala.macroAnnotations.{CachedWithRecursionGuard, ModCount}

import scala.collection.Seq

package object intellij {

  object psiExt {

    implicit class AnnotExt(val annotation: ScAnnotation) extends AnyVal {
      def isMetaMacro: Boolean = {
        def hasMetaAnnotation(results: Array[ResolveResult]) = results.map(_.getElement).exists {
          case c: ScPrimaryConstructor => c.containingClass.isMetaAnnotatationImpl
          case o: ScTypeDefinition => o.isMetaAnnotatationImpl
          case _ => false
        }

        // do not resolve anything while the stubs are building to avoid deadlocks
        if (ScStubElementType.isStubBuilding || DumbService.isDumb(annotation.getProject))
          return false

        annotation.constructor.reference.exists {
          case stRef: ScStableCodeReferenceElementImpl =>
            val processor = new ResolveProcessor(stRef.getKinds(incomplete = false), stRef, stRef.refName)
            hasMetaAnnotation(stRef.doResolve(processor))
          case _ => false
        }
      }
    }

    implicit class AnnotHolderExt(val ah: ScAnnotationsHolder) extends AnyVal {
      def getMetaCompanionObject: Option[scala.meta.Defn.Object] = {
        import scala.{meta => m}

        ah.getMetaExpansion match {
          case Left(_) => None
          case Right(m.Term.Block(Seq(_: m.Defn, obj: m.Defn.Object))) => Some(obj)
          case Right(_) => None
        }
      }

      def getMetaExpansion: Either[String, scala.meta.Tree] = {
        @CachedWithRecursionGuard(ah, Left("Recursive meta expansion"), ModCount.getBlockModificationCount)
        def getMetaExpansionCached: Either[String, Tree] = {
          //CHANGED if (ScalaProjectSettings.getInstance(ah.getProject).getScalaMetaMode == ScalaMetaMode.Enabled) {
          if (true) {
            val metaAnnotation = ah.annotations.find(AnnotExt(_).isMetaMacro)
            metaAnnotation match {
              case Some(annot) => MetaExpansionsManager.runMetaAnnotation(annot)
              case None => Left("No meta annotation")
            }
          } else Left("Meta expansions disabled in settings")
        }

        // no annotations must run or any non-physical PSI generated while stubs are building
        if (ScStubElementType.isStubBuilding) Left("")
        else getMetaExpansionCached
      }
    }

    implicit class TemplateDefExt(val td: ScTemplateDefinition) extends AnyVal {
      def isMetaAnnotatationImpl: Boolean = {
        td.members.exists(_.getModifierList.findChildrenByType(ScalaTokenTypes.kINLINE).nonEmpty) ||
          td.members.exists({
            case ah: ScAnnotationsHolder => ah.hasAnnotation("scala.meta.internal.inline.inline")
          })
      }
    }
  }
}
Example 8
Source File: ScDeclarationSequenceHolder.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi

import java.lang

import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi._
import com.intellij.psi.scope._
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScDeclaredElementsHolder
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScTypeDefinition, ScTrait, ScClass, ScObject}
import org.jetbrains.plugins.scala.lang.resolve.ResolveTargets
import org.jetbrains.plugins.scala.lang.resolve.processor.BaseProcessor

import scala.collection.Seq

trait ScDeclarationSequenceHolder extends ScalaPsiElement {
  override def processDeclarations(processor: PsiScopeProcessor,
                                   state: ResolveState,
                                   lastParent: PsiElement,
                                   place: PsiElement): Boolean = {

    def processElement(e: PsiElement, state: ResolveState): Boolean = {
      def isOkCompanionModule = {
        processor match {
          case b: BaseProcessor =>
            b.kinds.contains(ResolveTargets.OBJECT) || b.kinds.contains(ResolveTargets.VAL)
          case _ => true
        }
      }

      def isOkForFakeCompanionModule(t: ScTypeDefinition): Boolean = {
        isOkCompanionModule && t.fakeCompanionModule.isDefined
      }

      e match {
        case c: ScClass =>
          processor.execute(c, state)
          if (isOkForFakeCompanionModule(c)) {
            processor.execute(c.fakeCompanionModule.get, state)
          }
          c.getSyntheticImplicitMethod match {
            case Some(impl) => if (!processElement(impl, state)) return false
            case _ =>
          }
          true
        case t: ScTrait =>
          processor.execute(t, state)
          if (isOkForFakeCompanionModule(t)) {
            processor.execute(t.fakeCompanionModule.get, state)
          }
          true
        case named: ScNamedElement => processor.execute(named, state)
        case holder: ScDeclaredElementsHolder =>
          val elements: Seq[PsiNamedElement] = holder.declaredElements
          var i = 0
          while (i < elements.length) {
            ProgressManager.checkCanceled()
            if (!processor.execute(elements(i), state)) return false
            i = i + 1
          }
          true
        case _ => true
      }
    }

    if (lastParent != null) {
      var run = lastParent match {
        case element: ScalaPsiElement => element.getDeepSameElementInContext
        case _ => lastParent
      }
      while (run != null) {
        ProgressManager.checkCanceled()
        place match {
          case id: ScStableCodeReferenceElement =>
            run match {
              case po: ScObject if po.isPackageObject && id.qualName == po.qualifiedName => // do nothing
              case _ => if (!processElement(run, state)) return false
            }
          case _ => if (!processElement(run, state)) return false
        }
        run = run.getPrevSibling
      }

      //forward references are allowed (e.g. 2 local methods see each other)
      run = lastParent.getNextSibling
      val forwardState = state.put(BaseProcessor.FORWARD_REFERENCE_KEY, lang.Boolean.TRUE)
      while (run != null) {
        ProgressManager.checkCanceled()
        if (!processElement(run, forwardState)) return false
        run = run.getNextSibling
      }
    }
    true
  }
}
Example 9
Source File: ScMethodCallImpl.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.impl.expr

import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElementImpl
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.expr._

import scala.collection.Seq

class ScMethodCallImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScMethodCall {

  def getInvokedExpr: ScExpression = findChildByClassScala(classOf[ScExpression])

  def argumentExpressions: Seq[ScExpression] = if (args != null) args.exprs else Nil

  override def getEffectiveInvokedExpr: ScExpression = {
    findChildByClassScala(classOf[ScExpression]) match {
      case x: ScParenthesisedExpr => x.expr.getOrElse(x)
      case x => x
    }
  }

  override def argumentExpressionsIncludeUpdateCall: Seq[ScExpression] = {
    updateExpression() match {
      case Some(expr) => argumentExpressions ++ Seq(expr)
      case _ => argumentExpressions
    }
  }

  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      case visitor: ScalaElementVisitor => super.accept(visitor)
      case _ => super.accept(visitor)
    }
  }

  override def toString: String = "MethodCall"
}
Example 10
Source File: ScInfixExprImpl.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.impl.expr

import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElementImpl
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types.result.TypeResult

import scala.collection.Seq

class ScInfixExprImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScInfixExpr {

  override def toString: String = "InfixExpression"

  override def argumentExpressions: Seq[ScExpression] = {
    if (isRightAssoc) Seq(lOp)
    else rOp match {
      case tuple: ScTuple => tuple.exprs
      case t: ScParenthesisedExpr => t.expr match {
        case Some(expr) => Seq(expr)
        case None => Seq(t)
      }
      case _: ScUnitExpr => Seq.empty
      case expr => Seq(expr)
    }
  }

  protected override def innerType: TypeResult = {
    operation.bind() match {
      //this is assignment statement: x += 1 equals to x = x + 1
      case Some(r) if r.element.name + "=" == operation.refName =>
        val lText = lOp.getText
        val rText = rOp.getText
        val exprText = s"$lText = $lText ${r.element.name} $rText"
        val newExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprText, getContext, this)
        newExpr.`type`()
      case _ => super.innerType
    }
  }

  override def accept(visitor: ScalaElementVisitor) {
    visitor.visitInfixExpression(this)
  }

  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      case visitor: ScalaElementVisitor => visitor.visitInfixExpression(this)
      case _ => super.accept(visitor)
    }
  }
}
Example 11
Source File: LocationSpec.scala From scala-spark-cab-rides-predictions with MIT License
import models.{Location, LocationRepository}
import org.scalatest.{FlatSpec, Matchers}

import scala.collection.Seq

class LocationSpec extends FlatSpec with Matchers {
  behavior of "Location"

  "repository locations" should " return Seq[Location]" in {
    val locations = LocationRepository.getLocations
    locations should matchPattern {
      case _: Seq[Location] =>
    }
  }

  "random pairing of locations" should "return Seq oftype of locations" in {
    val locationsTuples = LocationRepository.getPairedLocations
    locationsTuples should matchPattern {
      case _: Seq[(Location, Location)] =>
    }
  }
}
Example 12
Source File: ImplicitCollectorCache.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.implicits

import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import com.intellij.util.containers.ContainerUtil
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.api.TypeParameterType
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult

import scala.collection.{Seq, Set}

class ImplicitCollectorCache(project: Project) {
  private val map = ContainerUtil.newConcurrentMap[(ImplicitSearchScope, ScType), Seq[ScalaResolveResult]]()
  private val typeParametersOwnersCache = ContainerUtil.newConcurrentMap[ScType, Set[ScTypeParametersOwner]]

  def get(place: PsiElement, tp: ScType): Option[Seq[ScalaResolveResult]] = {
    val scope = ImplicitSearchScope.forElement(place)
    Option(map.get((scope, tp)))
  }

  def put(place: PsiElement, tp: ScType, value: Seq[ScalaResolveResult]): Unit = {
    val scope = ImplicitSearchScope.forElement(place)
    map.put((scope, tp), value)
  }

  def typeParametersOwners(tp: ScType): Set[ScTypeParametersOwner] = {
    def collectOwners: Set[ScTypeParametersOwner] = {
      var result = Set[ScTypeParametersOwner]()
      tp.visitRecursively {
        case TypeParameterType(_, _, _, psiTP) =>
          psiTP.getOwner match {
            case f: ScFunction => result += f
            case _ =>
          }
        case _ =>
      }
      result
    }

    typeParametersOwnersCache.get(tp) match {
      case null =>
        val owners = collectOwners
        typeParametersOwnersCache.putIfAbsent(tp, owners)
        owners
      case seq => seq
    }
  }

  def clear(): Unit = {
    map.clear()
    typeParametersOwnersCache.clear()
  }
}
Example 13
Source File: TypeParameterType.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.types.api

import com.intellij.psi.PsiTypeParameter
import org.jetbrains.plugins.scala.extensions.PsiNamedElementExt
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{PsiTypeParameterExt, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.psi.types.{NamedType, ScSubstitutor, ScType, ScUndefinedSubstitutor}
import org.jetbrains.plugins.scala.project.ProjectContext

import scala.collection.Seq

sealed trait TypeParameterType extends ValueType with NamedType {
  val arguments: Seq[TypeParameterType]

  def lowerType: ScType

  def upperType: ScType

  def psiTypeParameter: PsiTypeParameter

  override implicit def projectContext: ProjectContext = psiTypeParameter

  override val name: String = psiTypeParameter.name

  def nameAndId: (String, Long) = psiTypeParameter.nameAndId

  def isInvariant: Boolean = psiTypeParameter match {
    case typeParam: ScTypeParam => !typeParam.isCovariant && !typeParam.isContravariant
    case _ => false
  }

  def isCovariant: Boolean = psiTypeParameter match {
    case typeParam: ScTypeParam => typeParam.isCovariant
    case _ => false
  }

  def isContravariant: Boolean = psiTypeParameter match {
    case typeParam: ScTypeParam => typeParam.isContravariant
    case _ => false
  }

  override def equivInner(`type`: ScType, substitutor: ScUndefinedSubstitutor, falseUndef: Boolean): (Boolean, ScUndefinedSubstitutor) =
    (`type` match {
      case that: TypeParameterType => (that.psiTypeParameter eq psiTypeParameter) || {
        (psiTypeParameter, that.psiTypeParameter) match {
          case (myBound: ScTypeParam, thatBound: ScTypeParam) =>
            //TODO this is a temporary hack, so ignore substitutor for now
            myBound.lowerBound.exists(_.equiv(thatBound.lowerBound.getOrNothing)) &&
              myBound.upperBound.exists(_.equiv(thatBound.upperBound.getOrNothing)) &&
              (myBound.name == thatBound.name || thatBound.isHigherKindedTypeParameter || myBound.isHigherKindedTypeParameter)
          case _ => false
        }
      }
      case _ => false
    }, substitutor)

  override def visitType(visitor: TypeVisitor): Unit = visitor.visitTypeParameterType(this)
}

object TypeParameterType {
  def apply(tp: TypeParameter): TypeParameterType = LazyTpt(tp, Some(ScSubstitutor.empty))

  def apply(psiTp: PsiTypeParameter, maybeSubstitutor: Option[ScSubstitutor] = Some(ScSubstitutor.empty)): TypeParameterType =
    LazyTpt(TypeParameter(psiTp), maybeSubstitutor)

  def apply(arguments: Seq[TypeParameterType],
            lowerType: ScType,
            upperType: ScType,
            psiTypeParameter: PsiTypeParameter): TypeParameterType =
    StrictTpt(arguments, lowerType, upperType, psiTypeParameter)

  def unapply(tpt: TypeParameterType): Option[(Seq[TypeParameterType], ScType, ScType, PsiTypeParameter)] =
    Some(tpt.arguments, tpt.lowerType, tpt.upperType, tpt.psiTypeParameter)

  private case class LazyTpt(typeParameter: TypeParameter, maybeSubstitutor: Option[ScSubstitutor] = Some(ScSubstitutor.empty))
    extends TypeParameterType {

    val arguments: Seq[TypeParameterType] = typeParameter.typeParameters.map(LazyTpt(_, maybeSubstitutor))

    lazy val lowerType: ScType = lift(typeParameter.lowerType)

    lazy val upperType: ScType = lift(typeParameter.upperType)

    def psiTypeParameter: PsiTypeParameter = typeParameter.psiTypeParameter

    private def lift(tp: ScType): ScType = maybeSubstitutor match {
      case Some(s) => s.subst(tp)
      case _ => tp
    }
  }

  private case class StrictTpt(arguments: Seq[TypeParameterType],
                               override val lowerType: ScType,
                               override val upperType: ScType,
                               psiTypeParameter: PsiTypeParameter) extends TypeParameterType
}
Example 14
Source File: ScFun.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.api.statements

import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.api.TypeParameter
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{Parameter, ScMethodType, ScTypePolymorphicType}

import scala.collection.Seq

//some functions are not PsiMethods and are e.g. not visible from java
//see ScSyntheticFunction
trait ScFun extends ScTypeParametersOwner {
  def retType: ScType

  def paramClauses: Seq[Seq[Parameter]]

  def methodType: ScType = {
    paramClauses.foldRight[ScType](retType) {
      (params: Seq[Parameter], tp: ScType) => ScMethodType(tp, params, isImplicit = false)
    }
  }

  def polymorphicType: ScType = {
    if (typeParameters.isEmpty) methodType
    else ScTypePolymorphicType(methodType, typeParameters.map(TypeParameter(_)))
  }
}
Example 15
Source File: package.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala

import com.intellij.ide.util.treeView.AbstractTreeNode
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject, ScTrait, ScTypeDefinition}

import scala.collection.Seq

package object projectView {

  type Node = AbstractTreeNode[_]

  object WorksheetFile {
    def unapply(file: ScalaFile): Boolean = file.isWorksheetFile
  }

  object ScriptFile {
    def unapply(file: ScalaFile): Boolean = file.isScriptFile
  }

  object ScalaDialectFile {
    def unapply(file: ScalaFile): Boolean = file.getFileType != ScalaFileType.INSTANCE
  }

  object SingularDefinition {
    def unapply(file: ScalaFile): Option[(ScTypeDefinition)] = Some(file.typeDefinitions) collect {
      case Seq(definition) if matchesFileName(definition) => definition
      case Seq(definition) if definition.isPackageObject => definition
    }

    private def matchesFileName(definition: ScTypeDefinition): Boolean =
      definition.containingVirtualFile.forall(_.getNameWithoutExtension == definition.name)
  }

  object ClassAndCompanionObject {
    def unapply(file: ScalaFile): Option[(ScClass, ScObject)] = Some(file.typeDefinitions) collect {
      case PairedTypeDefinitions(aClass: ScClass, anObject: ScObject) => (aClass, anObject)
      case PairedTypeDefinitions(anObject: ScObject, aClass: ScClass) => (aClass, anObject)
    }
  }

  object TraitAndCompanionObject {
    def unapply(file: ScalaFile): Option[(ScTrait, ScObject)] = Some(file.typeDefinitions) collect {
      case PairedTypeDefinitions(aTrait: ScTrait, anObject: ScObject) => (aTrait, anObject)
      case PairedTypeDefinitions(anObject: ScObject, aTrait: ScTrait) => (aTrait, anObject)
    }
  }

  private object PairedTypeDefinitions {
    def unapply(definitions: Seq[ScTypeDefinition]): Option[(ScTypeDefinition, ScTypeDefinition)] =
      Some(definitions) collect {
        case Seq(definition1: ScTypeDefinition, definition2: ScTypeDefinition)
          if definition1.name == definition2.name => (definition1, definition2)
      }
  }
}
Example 16
Source File: dependencies.scala From quark with Apache License 2.0
package quark.project

import scala.collection.Seq

import sbt._, Keys._

object dependencies {
  private val argonautVersion = "6.2-M3"
  private val monocleVersion = "1.3.2"
  private val quasarVersion = "14.5.7"
  private val pathyVersion = "0.2.2"
  private val refinedVersion = "0.5.0"
  private val scalacheckVersion = "1.12.5"
  private val scalazVersion = "7.2.8"
  private val specs2Version = "3.8.4-scalacheck-1.12.5"

  val core = Seq(
    "commons-codec" % "commons-codec" % "1.10",
    "org.scalaz" %% "scalaz-core" % scalazVersion,
    "org.quasar-analytics" %% "quasar-foundation-internal" % quasarVersion,
    "org.quasar-analytics" %% "quasar-foundation-internal" % quasarVersion % "test" classifier "tests",
    "org.quasar-analytics" %% "quasar-connector-internal" % quasarVersion,
    "org.quasar-analytics" %% "quasar-connector-internal" % quasarVersion % "test" classifier "tests",
    "org.quasar-analytics" %% "quasar-core-internal" % quasarVersion,
    "org.quasar-analytics" %% "quasar-core-internal" % quasarVersion % "test" classifier "tests",
    "org.quasar-analytics" %% "quasar-frontend-internal" % quasarVersion,
    "org.quasar-analytics" %% "quasar-frontend-internal" % quasarVersion % "test" classifier "tests",
    "com.github.julien-truffaut" %% "monocle-core" % monocleVersion,
    "com.nimbusds" % "oauth2-oidc-sdk" % "5.13",
    "com.slamdata" %% "pathy-core" % pathyVersion,
    "com.slamdata" %% "pathy-argonaut" % pathyVersion,
    "com.github.scopt" %% "scopt" % "3.5.0",
    "eu.timepit" %% "refined" % refinedVersion,
    "eu.timepit" %% "refined-scalacheck" % refinedVersion % "test",
    "io.argonaut" %% "argonaut" % argonautVersion,
    "io.argonaut" %% "argonaut-monocle" % argonautVersion,
    "io.argonaut" %% "argonaut-scalaz" % argonautVersion,
    "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test",
    "org.specs2" %% "specs2-core" % specs2Version % "test",
    "org.specs2" %% "specs2-scalacheck" % specs2Version % "test",
    "org.scalaz" %% "scalaz-scalacheck-binding" % scalazVersion % "test",
    "org.typelevel" %% "scalaz-specs2" % "0.4.0" % "test"
  )
}
Example 17
Source File: TestJLDFormat.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.serialization.jsonld

import java.util.{HashMap => JHashMap}

import com.github.jsonldjava.core.JsonLdOptions
import com.github.jsonldjava.core.JsonLdProcessor
import com.github.jsonldjava.utils.JsonUtils
import org.clulab.serialization.json.stringify
import org.clulab.wm.eidos.document.AnnotatedDocument
import org.clulab.wm.eidos.document.AnnotatedDocument.Corpus
import org.clulab.wm.eidos.test.TestUtils.ExtractionTest
import org.clulab.wm.eidos.text.english.cag.CAG._

import scala.collection.Seq

class TestJLDFormat extends ExtractionTest {

  def newTitledAnnotatedDocument(text: String): AnnotatedDocument = newTitledAnnotatedDocument(text, text)

  def newTitledAnnotatedDocument(text: String, title: String): AnnotatedDocument = {
    val annotatedDocument = ieSystem.extractFromText(text)
    annotatedDocument.document.id = Some(title)
    annotatedDocument
  }

  def serialize(corpus: Corpus): String = {
    val jldCorpus = new JLDCorpus(corpus)
    val jValue = jldCorpus.serialize()
    stringify(jValue, pretty = true)
  }

  behavior of "JLDSerializer"

  it should "serialize in a readable way" in {
    val json = serialize(Seq(newTitledAnnotatedDocument(p1, "p1")))
    json should not be empty

    // See https://github.com/jsonld-java/jsonld-java
    // Read the string into an Object (The type of this object will be a List, Map, String, Boolean,
    // Number or null depending on the root object in the file).
    val jsonObject = Option(JsonUtils.fromString(json))
      .getOrElse(throw new Exception("jsonObject is empty"))

    // Create a context JSON map containing prefixes and definitions
    val context = new JHashMap()
    // Customise context...

    // Create an instance of JsonLdOptions with the standard JSON-LD options
    val options = new JsonLdOptions
    // Customise options...

    // Call whichever JSONLD function you want! (e.g. compact)
    val compact = JsonUtils.toPrettyString(JsonLdProcessor.compact(jsonObject, context, options))
    compact should not be empty

    val expand = JsonUtils.toPrettyString(JsonLdProcessor.expand(jsonObject))
    expand should not be empty

    val flatten = JsonUtils.toPrettyString(JsonLdProcessor.flatten(jsonObject, options))
    flatten should not be empty

    val normalize = JsonUtils.toPrettyString(JsonLdProcessor.normalize(jsonObject))
    normalize should not be empty
  }
}
Example 18
Source File: ExtractFromText.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps.examples

import scala.collection.Seq

import org.clulab.serialization.json.stringify
import org.clulab.wm.eidos.EidosSystem
import org.clulab.wm.eidos.serialization.json.WMJSONSerializer
import org.clulab.wm.eidos.serialization.jsonld.JLDCorpus
import org.clulab.wm.eidos.utils.DisplayUtils.displayMention

object ExtractFromText extends App {

  val text = "Water trucking has decreased due to the cost of fuel."

  // Initialize the reader
  val reader = new EidosSystem()

  // Extract the mentions
  val annotatedDocument = reader.extractFromText(text)

  // Display in a pretty way
  annotatedDocument.odinMentions.foreach(displayMention)

  // Export to JSON-LD
  val corpus = new JLDCorpus(annotatedDocument)
  val mentionsJSONLD = corpus.serialize()
  println(stringify(mentionsJSONLD, pretty = true))

  // Or... optionally serialize to regular JSON
  // (e.g., if you want to later reload the mentions for post-processing)
  val mentionsJSON = WMJSONSerializer.jsonAST(annotatedDocument.odinMentions)
  println(stringify(mentionsJSON, pretty = true))
}
Example 19
Source File: TestHelper.scala From spark-summit-2018 with GNU General Public License v3.0
package com.twilio.open.streaming.trend.discovery

import java.io.{ByteArrayInputStream, InputStream}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.google.protobuf.Message
import com.googlecode.protobuf.format.JsonFormat
import com.holdenkarau.spark.testing.{LocalSparkContext, SparkContextProvider}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers, Suite}
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.Seq
import scala.io.Source
import scala.reflect.ClassTag
import scala.reflect.classTag

object TestHelper {
  val log: Logger = LoggerFactory.getLogger("com.twilio.open.streaming.trend.discovery.TestHelper")
  val mapper: ObjectMapper = {
    val m = new ObjectMapper()
    m.registerModule(DefaultScalaModule)
  }

  val jsonFormat: JsonFormat = new JsonFormat

  def loadScenario[T <: Message : ClassTag](file: String): Seq[T] = {
    val fileString = Source.fromFile(file).mkString
    val parsed = mapper.readValue(fileString, classOf[Sceanario])
    parsed.input.map { data =>
      val json = mapper.writeValueAsString(data)
      convert[T](json)
    }
  }

  def convert[T <: Message : ClassTag](json: String): T = {
    val clazz = classTag[T].runtimeClass
    val builder = clazz.getMethod("newBuilder").invoke(clazz).asInstanceOf[Message.Builder]
    try {
      val input: InputStream = new ByteArrayInputStream(json.getBytes())
      jsonFormat.merge(input, builder)
      builder.build().asInstanceOf[T]
    } catch {
      case e: Exception => throw e
    }
  }
}

@SerialVersionUID(1L)
case class KafkaDataFrame(key: Array[Byte], topic: Array[Byte], value: Array[Byte]) extends Serializable

case class Sceanario(input: Seq[Any], expected: Option[Any] = None)

trait SparkSqlTest extends BeforeAndAfterAll with SparkContextProvider {
  self: Suite =>

  @transient var _sparkSql: SparkSession = _
  @transient private var _sc: SparkContext = _

  override def sc: SparkContext = _sc

  def conf: SparkConf

  def sparkSql: SparkSession = _sparkSql

  override def beforeAll() {
    _sparkSql = SparkSession.builder().config(conf).getOrCreate()
    _sc = _sparkSql.sparkContext
    setup(_sc)
    super.beforeAll()
  }

  override def afterAll() {
    try {
      _sparkSql.close()
      _sparkSql = null
      LocalSparkContext.stop(_sc)
      _sc = null
    } finally {
      super.afterAll()
    }
  }
}
Example 20
Source File: JsonSerializationFailed.scala From lagom with Apache License 2.0
package com.lightbend.lagom.scaladsl.playjson

import scala.collection.Seq

import play.api.libs.json.JsPath
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import play.api.libs.json.JsonValidationError

class JsonSerializationFailed private[lagom] (
    message: String,
    errors: Seq[(JsPath, Seq[JsonValidationError])],
    json: JsValue
) extends RuntimeException {

  override def getMessage: String =
    s"$message\nerrors:\n${errors.map(errorToString).mkString("\t", "\n\t", "\n")}}\n${Json.prettyPrint(json)}"

  private def errorToString(t: (JsPath, Seq[JsonValidationError])) = t match {
    case (path, pathErrors) => s"$path: " + pathErrors.mkString(", ")
  }
}