scala.language.implicitConversions Scala Examples
The following examples show how to use scala.language.implicitConversions.
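In Scala 2, defining an implicit conversion method produces a feature warning unless scala.language.implicitConversions is imported (or the -language:implicitConversions compiler flag is set). Before the real-world code below, here is a minimal self-contained sketch of the pattern every example relies on (the UserId type is made up for illustration):

import scala.language.implicitConversions

final case class UserId(value: Long)

object UserId {
  // Without the language import above, this definition would trigger
  // the "implicit conversions" feature warning.
  implicit def longToUserId(id: Long): UserId = UserId(id)
}

val id: UserId = 42L // the compiler rewrites this to UserId.longToUserId(42L)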
Example 1
Source File: ResourceType.scala From jsonapi-scala with BSD 3-Clause "New" or "Revised" License
package com.qvantel.jsonapi

import scala.language.implicitConversions

import _root_.spray.json._

trait ResourceType[A] {
  def resourceType: String
}

object ResourceType {
  def apply[A](implicit rt: ResourceType[A]): ResourceType[A] = rt

  implicit def apply[A](s: String): ResourceType[A] = new ResourceType[A] {
    override def resourceType: String = s
  }

  implicit def resourceTypeJsonFormat[A](rt: ResourceType[A]): JsonFormat[ResourceType[A]] =
    new JsonFormat[ResourceType[A]] {
      override def write(obj: ResourceType[A]): JsValue = JsString(obj.resourceType)

      override def read(json: JsValue): ResourceType[A] = json match {
        case JsString(s) =>
          new ResourceType[A] {
            override def resourceType: String = s
          }
        case invalid => deserializationError(s"Expected ResourceType as JsString but got ‘$invalid’")
      }
    }
}
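The implicit apply[A](s: String) above lets a plain string be used wherever a ResourceType[A] is expected; the companion object is in the implicit scope of the target type, so no extra import is needed. A minimal usage sketch (the Person model type is hypothetical, not part of jsonapi-scala):

import com.qvantel.jsonapi.ResourceType

final case class Person(name: String) // hypothetical model type

// the string literal is implicitly converted to a ResourceType[Person]
implicit val personType: ResourceType[Person] = "people"

ResourceType[Person].resourceType // "people"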
Example 2
Source File: CatsConcurrentEffectSyntax.scala From interop-cats with Apache License 2.0
package zio.interop

import cats.effect.ConcurrentEffect
import zio.{ IO, RIO, Runtime, Task, ZIO }

import scala.language.implicitConversions

trait CatsConcurrentEffectSyntax {
  implicit final def ZIOConcurrentEffectOps(@deprecated("", "") zio: ZIO.type): CatsConcurrentEffectSyntax.zioOps.type =
    CatsConcurrentEffectSyntax.zioOps
  implicit final def RIOConcurrentEffectOps(@deprecated("", "") rio: RIO.type): CatsConcurrentEffectSyntax.zioOps.type =
    CatsConcurrentEffectSyntax.zioOps

  implicit final def IOConcurrentEffectOps(@deprecated("", "") io: IO.type): CatsConcurrentEffectSyntax.ioOps.type =
    CatsConcurrentEffectSyntax.ioOps
  implicit final def TaskConcurrentEffectOps(@deprecated("", "") io: Task.type): CatsConcurrentEffectSyntax.ioOps.type =
    CatsConcurrentEffectSyntax.ioOps
}

private[interop] object CatsConcurrentEffectSyntax {
  object zioOps {
    final def concurrentEffect[R]: ZIO[R, Nothing, ConcurrentEffect[RIO[R, *]]] =
      ZIO.runtime.map(catz.taskEffectInstance(_: Runtime[R]))

    final def concurrentEffectWith[R, E, A](f: ConcurrentEffect[RIO[R, *]] => ZIO[R, E, A]): ZIO[R, E, A] =
      ZIO.runtime.flatMap(f apply catz.taskEffectInstance(_: Runtime[R]))
  }

  object ioOps {
    final def concurrentEffect: ZIO[Any, Nothing, ConcurrentEffect[RIO[Any, *]]] =
      ZIO.runtime.map(catz.taskEffectInstance(_: Runtime[Any]))

    def concurrentEffectWith[E, A](f: ConcurrentEffect[RIO[Any, *]] => ZIO[Any, E, A]): ZIO[Any, E, A] =
      ZIO.runtime.flatMap(f apply catz.taskEffectInstance(_: Runtime[Any]))
  }
}
Example 3
Source File: CourierExceptionCompat.scala From courier with Apache License 2.0
package org.coursera.courier.sbt

import xsbti.{Severity, Problem, Position}
import java.util.Optional
import sbt._

import scala.language.implicitConversions

object CourierExceptionCompat {

  class CourierCompilationException(
      source: Option[File],
      message: String,
      atLine: Option[Int],
      column: Option[Int],
      severity: Severity)
    extends xsbti.CompileFailed with FeedbackProvidedException {

    def arguments(): Array[String] = Array()

    def problems(): Array[Problem] =
      Array(new CourierCompilationProblem(source, message, atLine, column, severity))

    def line = atLine.map(_.asInstanceOf[java.lang.Integer]).orNull
    def position = column.map(_.asInstanceOf[java.lang.Integer]).orNull
    def sourceName = source.map(_.getAbsolutePath).orNull
  }

  class CourierCompilationProblem(
      source: Option[File],
      msg: String,
      atLine: Option[Int],
      column: Option[Int],
      svrty: Severity)
    extends Problem {

    def category(): String = "Courier"
    def severity(): Severity = svrty
    def message(): String = msg
    def position(): Position = new CourierCompilationErrorPosition(source, atLine, column)
  }

  class CourierCompilationErrorPosition(
      source: Option[File],
      atLine: Option[Int],
      column: Option[Int])
    extends Position {

    def toMaybe[T](option: Option[T]) = option.map { optionValue => optionValue }.asJava

    def line(): Optional[Integer] = toMaybe(atLine.map(_.asInstanceOf[java.lang.Integer]))
    def lineContent(): String = ""
    def offset(): Optional[Integer] = toMaybe(column.map(_.asInstanceOf[java.lang.Integer]))
    def pointer(): Optional[Integer] = Optional.empty[java.lang.Integer]
    def pointerSpace(): Optional[String] = Optional.empty[String]
    def sourcePath(): Optional[String] = toMaybe(source.map(_.getAbsolutePath))
    def sourceFile(): Optional[File] = toMaybe(source)
  }
}
Example 4
Source File: NamedCollection.scala From flamy with Apache License 2.0
package com.flaminem.flamy.utils.collection.mutable

import com.flaminem.flamy.utils.Named

import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.{Builder, ListBuffer}
import scala.collection.{TraversableLike, mutable}
import scala.language.implicitConversions

class NamedCollection[V <: Named]
  extends IndexedCollection[String, V] with TraversableLike[V, NamedCollection[V]] {

  override def newBuilder: mutable.Builder[V, NamedCollection[V]] = {
    new ListBuffer[V] mapResult { x => new NamedCollection(x) }
  }

  def this(namedItems: Traversable[V]) = {
    this()
    this ++= namedItems
  }

  def getIndexOf(value: V): String = value.getName.toLowerCase
}

object NamedCollection {

  def newBuilder[V <: Named](): Builder[V, NamedCollection[V]] = {
    new Builder[V, NamedCollection[V]]() {
      val buffer = new NamedCollection[V]()

      override def +=(elem: V): this.type = {
        buffer += elem
        this
      }

      override def clear(): Unit = {
        buffer.clear()
      }

      override def result(): NamedCollection[V] = {
        buffer
      }
    }
  }

  implicit def canBuildFrom[In <: Named, Out <: Named]: CanBuildFrom[NamedCollection[In], Out, NamedCollection[Out]] = {
    new CanBuildFrom[NamedCollection[In], Out, NamedCollection[Out]]() {
      override def apply(from: NamedCollection[In]): Builder[Out, NamedCollection[Out]] = newBuilder()
      override def apply(): Builder[Out, NamedCollection[Out]] = newBuilder()
    }
  }
}
Example 5
Source File: ItemName.scala From flamy with Apache License 2.0
package com.flaminem.flamy.model.names

import com.flaminem.flamy.model.exceptions.FlamyException
import org.rogach.scallop.ValueConverter

import scala.collection.TraversableLike
import scala.collection.generic.CanBuildFrom
import scala.language.{higherKinds, implicitConversions}
import scala.reflect.runtime.universe._
import scala.util.{Failure, Success, Try}

// NOTE: the enclosing declaration was truncated in the original listing; the
// members below presumably live in the ItemName companion object.
object ItemName {

  def tryParse(s: String): Try[ItemName] = {
    Try {
      SchemaName.parse(s)
        .orElse { TableName.parse(s) }
        .orElse { TablePartitionName.parse(s) }
        .getOrElse {
          throw new IllegalArgumentException("Wrong item name : " + s)
        }
    }
  }

  def tryParse(s: String, allowedTypes: Set[Class[_]]): Try[ItemName] = {
    tryParse(s).flatMap {
      case item if allowedTypes.contains(item.getClass) => Success(item)
      case item =>
        Failure(
          new IllegalArgumentException(
            s"Item $item is a ${item.getClass.getSimpleName}, " +
            s"but only the following item types are allowed: ${allowedTypes.map{_.getSimpleName}.mkString("[", ", ", "]")}"
          )
        )
    }
  }

  implicit def fromStringTraversableLike[T[Z] <: TraversableLike[Z, T[Z]]]
    (l: T[String])(implicit bf: CanBuildFrom[T[String], ItemName, T[ItemName]]): T[ItemName] = {
    l.map{ tryParse(_).get }
  }

  private def fromArgs(args: Seq[String]): Either[String, Option[List[ItemName]]] = {
    val tries: Seq[Try[ItemName]] = args.map{tryParse}
    if (tries.forall{_.isSuccess}) {
      Right(Some(tries.map{_.get}.toList))
    }
    else {
      val firstFailureIndex = tries.indexWhere(_.isFailure)
      Left(s"Could not parse the item name ${args(firstFailureIndex)}")
    }
  }

  implicit val scallopConverterList: ValueConverter[List[ItemName]] = {
    new ValueConverter[List[ItemName]] {
      override def parse(s: List[(String, List[String])]): Either[String, Option[List[ItemName]]] = {
        s match {
          case l if l.nonEmpty => fromArgs(l.flatMap{_._2})
          case Nil => Right(None)
        }
      }
      override val tag: TypeTag[List[ItemName]] = typeTag[List[ItemName]]
      override val argType = org.rogach.scallop.ArgType.LIST
    }
  }
}
Example 6
Source File: TableName.scala From flamy with Apache License 2.0
package com.flaminem.flamy.model.names

import com.flaminem.flamy.parsing.ParsingUtils
import org.rogach.scallop.ValueConverter

import scala.language.implicitConversions
import scala.reflect.runtime.universe._
import scala.util.matching.Regex

class TableName private (val fullName: String) extends ItemName {

  lazy val schemaName: SchemaName = SchemaName(fullName.split('.')(0))
  lazy val name: String = fullName.split('.')(1)

  def isInSchema(schema: ItemName): Boolean = schema match {
    case s: SchemaName => s.equals(schemaName)
    case default => false
  }

  override def isInOrEqual(that: ItemName): Boolean = that match {
    case name: SchemaName => this.isInSchema(name)
    case name: TableName => name == this
    case _ => false
  }
}

object TableName {

  // TODO: during parsing, table names with wrong names are allowed. We should probably add some safety about that.
  def apply(fullName: String): TableName = {
    new TableName(fullName.toLowerCase)
  }

  def unapply(tableName: TableName): Option[String] = Some(tableName.fullName)

  def apply(schemaName: String, tableName: String): TableName =
    new TableName(schemaName.toLowerCase + "." + tableName.toLowerCase)

  def apply(schemaName: SchemaName, tableName: String): TableName =
    new TableName(schemaName.fullName + "." + tableName.toLowerCase)

  implicit val order: Ordering[TableName] = new Ordering[TableName] {
    override def compare(x: TableName, y: TableName): Int = x.fullName.compareTo(y.fullName)
  }

  val t: String = ParsingUtils.t
  val tableRegex: Regex = s"\\A$t[.]$t\\z".r

  def parse(s: String): Option[TableName] = {
    s match {
      case tableRegex() => Some(new TableName(s.toLowerCase))
      case _ => None
    }
  }

  private def fromArg(arg: String): Either[String, Option[TableName]] = {
    val res: Option[TableName] = parse(arg)
    if (res.isDefined) {
      Right(Some(res.get))
    }
    else {
      Left("")
    }
  }

  private def fromArgs(args: Seq[String]): Either[String, Option[List[TableName]]] = {
    val tries: Seq[Option[TableName]] = args.map{parse}
    if (tries.forall{_.isDefined}) {
      Right(Some(tries.map{_.get}.toList))
    }
    else {
      Left("")
    }
  }

  implicit val scallopConverter: ValueConverter[TableName] = {
    new ValueConverter[TableName] {
      override def parse(s: List[(String, List[String])]): Either[String, Option[TableName]] = {
        s match {
          case l if l.nonEmpty => fromArg(l.flatMap{_._2}.head)
          case Nil => Right(None)
        }
      }
      override val tag: TypeTag[TableName] = typeTag[TableName]
      override val argType = org.rogach.scallop.ArgType.SINGLE
    }
  }

  implicit val scallopConverterList: ValueConverter[List[TableName]] = {
    new ValueConverter[List[TableName]] {
      override def parse(s: List[(String, List[String])]): Either[String, Option[List[TableName]]] = {
        s match {
          case l if l.nonEmpty => fromArgs(l.flatMap{_._2})
          case Nil => Right(None)
        }
      }
      override val tag: TypeTag[List[TableName]] = typeTag[List[TableName]]
      override val argType = org.rogach.scallop.ArgType.LIST
    }
  }

  implicit def fromString(s: String): TableName = TableName(s)
}
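The fromString conversion at the end allows a raw string wherever a TableName is expected, which keeps call sites terse. A minimal sketch:

import com.flaminem.flamy.model.names.TableName

val t: TableName = "db.table" // implicitly converted via TableName.fromString
t.schemaName.fullName         // "db"
t.name                        // "table"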
Example 7
Source File: TablePartitionName.scala From flamy with Apache License 2.0
package com.flaminem.flamy.model.names

import com.flaminem.flamy.model.{PartitionColumn, PartitionKey}
import com.flaminem.flamy.parsing.ParsingUtils

import scala.language.{higherKinds, implicitConversions}

class TablePartitionName(val fullName: String) extends ItemName {

  lazy val schemaName: SchemaName = SchemaName(tableFullName.split('.')(0))
  lazy val tableName: TableName = TableName(tableFullName)
  private lazy val tableFullName: String = fullName.split('/')(0)
  lazy val partitionName: String = partColNames.mkString("/")
  lazy val partColNames: Seq[PartitionColumnName] = fullName.split('/').tail.map{PartitionColumnName}

  def partitionKeys: Seq[PartitionKey] = partColNames.map{_.toPartitionKey}

  def partitionColumns: Seq[PartitionColumn] = partColNames.map{_.toPartitionColumn}

  def isInSchema(schema: ItemName): Boolean = schema match {
    case s: SchemaName => s.equals(schemaName)
    case default => false
  }

  def isInTable(schema: ItemName): Boolean = schema match {
    case t: TableName => t.equals(tableName)
    case default => false
  }

  override def isInOrEqual(that: ItemName): Boolean = that match {
    case name: SchemaName => this.isInSchema(name)
    case name: TableName => this.isInTable(name)
    case name: TablePartitionName => name == this
    case _ => false
  }
}

object TablePartitionName {

  def apply(fullName: String): TablePartitionName = {
    parse(fullName).getOrElse{
      throw new IllegalArgumentException(s"$fullName is not a correct TablePartitionName")
    }
  }

  def parse(s: String): Option[TablePartitionName] = {
    val t = ParsingUtils.t
    val tablePartitionRegex = s"\\A($t[.]$t)/(.*)\\z".r
    s match {
      case tablePartitionRegex(tableName, partitionName) =>
        Some(new TablePartitionName(tableName.toLowerCase + "/" + partitionName))
      case _ => None
    }
  }

  def unapply(tablePartitionName: TablePartitionName): Option[String] = Some(tablePartitionName.fullName)

  def apply(tableName: TableName, columnNames: PartitionColumnName*): TablePartitionName = {
    new TablePartitionName(tableName.fullName + "/" + columnNames.mkString("/"))
  }

  def apply(tableName: TableName, columnNames: String): TablePartitionName = {
    new TablePartitionName(tableName.fullName + "/" + columnNames)
  }

  implicit val order: Ordering[TablePartitionName] = new Ordering[TablePartitionName] {
    override def compare(x: TablePartitionName, y: TablePartitionName): Int = x.fullName.compareTo(y.fullName)
  }
}
Example 8
Source File: SchemaName.scala From flamy with Apache License 2.0
package com.flaminem.flamy.model.names

import com.flaminem.flamy.parsing.ParsingUtils

import scala.language.implicitConversions
import scala.util.matching.Regex

class SchemaName private (val fullName: String) extends ItemName {

  def name: String = fullName

  override def isInOrEqual(that: ItemName): Boolean = that match {
    case name: SchemaName => name == this
    case _ => false
  }
}

object SchemaName {

  def apply(fullName: String): SchemaName = {
    parse(fullName).getOrElse{
      throw new IllegalArgumentException(
        s"$fullName is not a valid name for schemas. Valid names only contain alphabet characters, numbers and _."
      )
    }
  }

  val t: String = ParsingUtils.t
  val schemaRegex: Regex = s"\\A$t\\z".r

  def parse(s: String): Option[SchemaName] = {
    s match {
      case schemaRegex() => Some(new SchemaName(s))
      case _ => None
    }
  }

  def unapply(schemaName: SchemaName): Option[String] = Some(schemaName.fullName)

  implicit val order: Ordering[SchemaName] = new Ordering[SchemaName] {
    override def compare(x: SchemaName, y: SchemaName): Int = x.fullName.compareTo(y.fullName)
  }

  implicit def toString(schema: SchemaName): String = schema.fullName
}
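The implicit toString goes the other way: a SchemaName can be passed wherever a plain String is required. Sketch (the useStatement helper is hypothetical):

import com.flaminem.flamy.model.names.SchemaName

def useStatement(db: String): String = s"USE $db" // hypothetical helper expecting a String

val schema = SchemaName("db")
useStatement(schema) // SchemaName is implicitly widened to its fullName, "db"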
Example 9
Source File: MergeableTableDependencyCollection.scala From flamy with Apache License 2.0
package com.flaminem.flamy.parsing.model

import com.flaminem.flamy.model.TableInfo
import com.flaminem.flamy.model.collection.immutable.MergeableTableInfoCollection
import com.flaminem.flamy.model.names.TableName
import com.flaminem.flamy.utils.collection.mutable.MergeableIndexedCollection

import scala.language.implicitConversions

class MergeableTableDependencyCollection()
  extends MergeableIndexedCollection[TableName, TableDependency] {

  def this(tableDependencies: Seq[TableDependency]) {
    this()
    this ++= tableDependencies
  }

  def toTableDependencyCollection: TableDependencyCollection = {
    new TableDependencyCollection(this)
  }

  def toTableInfoCollection: MergeableTableInfoCollection = {
    MergeableTableInfoCollection(getAllValues.map{TableInfo(_)}.toSeq: _*)
  }

  override def getIndexOf(value: TableDependency): TableName = {
    value.fullName
  }
}

object MergeableTableDependencyCollection {

  implicit class MergeableTableDependencyCollectionConvertible(s: Seq[TableDependency]) {
    def toMergeableTableDependencyCollection: MergeableTableDependencyCollection = {
      new MergeableTableDependencyCollection(s)
    }
  }
}
Example 10
Source File: ClauseResolution.scala From flamy with Apache License 2.0
package com.flaminem.flamy.parsing.model

import scala.language.implicitConversions
import scala.reflect.ClassTag

// NOTE: the opening of the enclosing object and of the Rule class were
// truncated in the original listing; both are reconstructed here from the
// surrounding code.
object ClauseResolution {

  class Rule[T <: Clause : ClassTag](pf: PartialFunction[T, Clause])
    extends PartialFunction[Clause, Clause] {

    override def isDefinedAt(x: Clause): Boolean = x match {
      case y: T => pf.isDefinedAt(y)
      case _ => false
    }

    override def apply(x: Clause): Clause = {
      pf.apply(x.asInstanceOf[T])
    }
  }

  object Rule {
    def apply[T <: Clause: ClassTag](pf: PartialFunction[T, Clause]): Rule[T] = new Rule(pf)
  }

  // type Rule = PartialFunction[Clause, Clause]

  import com.flaminem.flamy.parsing.model.Clause._

  val simpleRules: Seq[Rule[Clause]] = Seq(
    Rule[And] {
      case _ And False => False
      case False And _ => False
      case Maybe And _ => Maybe
      case _ And Maybe => Maybe
      case True And True => True
    },
    Rule[Or] {
      case _ Or True => True
      case True Or _ => True
      case Maybe Or _ => Maybe
      case _ Or Maybe => Maybe
    },
    Rule[Not] {
      case Not(False) => True
      case Not(True) => False
      case Not(Maybe) => Maybe
    }
  )

  implicit def rulesApplier(rules: Seq[Rule[Clause]]): PartialFunction[Clause, Clause] = {
    case clause: Clause =>
      rules.foldLeft(clause) {
        case (c, rule) if rule.isDefinedAt(c) => rule.apply(c)
        case (c, _) => c
      }
  }
}
Example 11
Source File: ItemNameTest.scala From flamy with Apache License 2.0
package com.flaminem.flamy.model

import com.flaminem.flamy.model.names.{ItemName, SchemaName, TableName, TablePartitionName}
import org.scalatest.FunSpec

import scala.language.implicitConversions

class ItemNameTest extends FunSpec {

  describe("a SchemaName") {
    it("should be recognised from a String") {
      assert(ItemName("db").isInstanceOf[SchemaName])
    }
    it("should not recognize incorrect Strings") {
      intercept[Exception]{ ItemName("db.table").asInstanceOf[SchemaName] }
      intercept[Exception]{ ItemName("col = true").asInstanceOf[SchemaName] }
    }
    it("should have correct attributes") {
      val schemaName = ItemName("db").asInstanceOf[SchemaName]
      assert(schemaName.name === "db")
      assert(schemaName.fullName === "db")
    }
    it("should have correct membership methods") {
      val schemaName = ItemName("db").asInstanceOf[SchemaName]
      assert(schemaName.isInOrEqual("db"))
      assert(!schemaName.isInOrEqual("db.table"))
      assert(!schemaName.isInOrEqual("db.table/part1=val1/part2=val2"))
      assert(!schemaName.isInOrEqual("toto"))
      assert(!schemaName.isInOrEqual("db.toto"))
      assert(!schemaName.isInOrEqual("db.table/part1=val1/part2=toto"))
    }
  }

  describe("a TableName") {
    val name = "db.table"
    it("should be recognised from a String") {
      val itemName = ItemName(name)
      assert(itemName.isInstanceOf[TableName])
    }
    it("should have correct attributes") {
      val tableName = ItemName(name).asInstanceOf[TableName]
      assert(tableName.name === "table")
      assert(tableName.schemaName.name === "db")
      assert(tableName.fullName === "db.table")
    }
    it("should have correct membership methods") {
      val tableName = ItemName(name).asInstanceOf[TableName]
      assert(tableName.isInSchema("db"))
      assert(!tableName.isInSchema("toto"))
      assert(tableName.isInOrEqual("db"))
      assert(tableName.isInOrEqual("db.table"))
      assert(!tableName.isInOrEqual("db.table/part1=val1/part2=val2"))
      assert(!tableName.isInOrEqual("toto"))
      assert(!tableName.isInOrEqual("db.toto"))
      assert(!tableName.isInOrEqual("db.table/part1=val1/part2=toto"))
    }
  }

  describe("a TablePartitionName") {
    val name = "db.table/part1=val1/part2=val2"
    it("should be recognised from a String") {
      val itemName = ItemName(name)
      assert(itemName.isInstanceOf[TablePartitionName])
    }
    it("should have correct attributes") {
      val partitionName = ItemName(name).asInstanceOf[TablePartitionName]
      assert(partitionName.tableName.fullName === "db.table")
      assert(partitionName.tableName.name === "table")
      assert(partitionName.schemaName.name === "db")
      assert(partitionName.partitionName === "part1=val1/part2=val2")
    }
    it("should have correct membership methods") {
      val partitionName = ItemName(name).asInstanceOf[TablePartitionName]
      assert(partitionName.isInSchema("db"))
      assert(!partitionName.isInSchema("toto"))
      assert(partitionName.isInTable("db.table"))
      assert(!partitionName.isInTable("db"))
      assert(!partitionName.isInTable("db.toto"))
      assert(!partitionName.isInTable("toto.table"))
      assert(partitionName.isInOrEqual("db"))
      assert(partitionName.isInOrEqual("db.table"))
      assert(partitionName.isInOrEqual("db.table/part1=val1/part2=val2"))
      assert(!partitionName.isInOrEqual("toto"))
      assert(!partitionName.isInOrEqual("db.toto"))
      assert(!partitionName.isInOrEqual("db.table/part1=val1/part2=toto"))
    }
  }
}
Example 12
Source File: SemVerIdentifierList.scala From sbt-git-versioning with MIT License
package com.rallyhealth.sbt.versioning

import scala.language.implicitConversions

// NOTE: the class declaration was truncated in the original listing; it is
// reconstructed here from the members and the companion object below.
case class SemVerIdentifierList(values: Seq[SemVerIdentifier]) extends Ordered[SemVerIdentifierList] {

  override def compare(that: SemVerIdentifierList): Int = {
    val thisIdentifiers = values.sorted
    val thatIdentifiers = that.values.sorted
    if (thisIdentifiers.isEmpty && thatIdentifiers.isEmpty) {
      0 // both lists are empty, so the identifier lists are equal
    } else if (thisIdentifiers.isEmpty) {
      1 // an empty list of identifiers is considered GREATER than a non-empty list, see http://semver.org/#spec-item-11
    } else if (thatIdentifiers.isEmpty) {
      -1
    } else {
      thisIdentifiers.zipAll(thatIdentifiers, SemVerIdentifier.Empty, SemVerIdentifier.Empty).foldLeft(0) {
        case (result, _) if result != 0 => result
        case (_, (a, b)) => a.compare(b)
      }
    }
  }

  def :+(other: SemVerIdentifier): SemVerIdentifierList = SemVerIdentifierList(values :+ other)

  def ++(other: SemVerIdentifierList): SemVerIdentifierList = SemVerIdentifierList(values ++ other.values)

  def ++(other: Seq[SemVerIdentifier]): SemVerIdentifierList = SemVerIdentifierList(values ++ other)

  def ++(other: Option[SemVerIdentifier]): SemVerIdentifierList = this ++ other.toSeq
}

object SemVerIdentifierList {

  val separatorChar = '-'

  val empty = SemVerIdentifierList(Seq.empty)

  implicit def identifier2IdentifierList(id: SemVerIdentifier): SemVerIdentifierList =
    SemVerIdentifierList(Seq(id))

  implicit def identifierSeq2IdentifierList(values: Seq[SemVerIdentifier]): SemVerIdentifierList =
    SemVerIdentifierList(values)

  implicit def stringIdentifierSeq2IdentifierList(values: Seq[String]): SemVerIdentifierList =
    SemVerIdentifierList(values.map(StringSemVerIdentifier(_)))
}
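With the companion-object conversions in scope, a single identifier or a plain Seq[String] can be supplied wherever a SemVerIdentifierList is expected. Sketch (StringSemVerIdentifier comes from the same project):

import com.rallyhealth.sbt.versioning._

val ids: SemVerIdentifierList = Seq("rc", "1") // stringIdentifierSeq2IdentifierList applies
val more = SemVerIdentifierList.empty ++ Seq(StringSemVerIdentifier("snapshot"))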
Example 13
Source File: LowerBoundedSemanticVersionSpec.scala From sbt-git-versioning with MIT License
package com.rallyhealth.sbt.versioning

import com.rallyhealth.sbt.versioning.LowerBoundedSemanticVersion._
import org.scalatest.{FunSpec, Matchers}

import scala.language.implicitConversions

class LowerBoundedSemanticVersionSpec extends FunSpec with Matchers {

  private val hash1 = HashSemVerIdentifier("0123abc") // for these tests we need a full hash, not an abbreviation
  private val hashAndCount1 = HashAndCount(hash1, 1)

  describe("BoundedSemanticVersion") {

    describe("ReleaseVersion") {

      it("equal bound") {
        val version = ReleaseVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = false)
        val bound = LowerBound(1, 2, 3)
        val result = version.lowerBound(bound, hashAndCount1)
        assert(result === version)
      }

      it("lower bound") {
        val version = ReleaseVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = false)
        val bound = LowerBound(1, 0, 0)
        val result = version.lowerBound(bound, hashAndCount1)
        assert(result === version)
      }

      it("higher bound") {
        val version = ReleaseVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = false)
        val bound = LowerBound(1, 2, 4)
        an[IllegalArgumentException] shouldBe thrownBy {
          version.lowerBound(bound, hashAndCount1)
        }
      }
    }

    describe("SnapshotVersion") {

      it("equal bound") {
        val version = SnapshotVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = false, hashAndCount1, 1)
        val result = version.lowerBound(LowerBound(1, 2, 3), hashAndCount1)
        assert(result === version)
      }

      it("lower bound") {
        val version = SnapshotVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = false, hashAndCount1, 1)
        val bound = LowerBound(1, 0, 0)
        val result = version.lowerBound(bound, hashAndCount1)
        assert(result === version)
        assert(result.toString === s"1.2.3-1-$hash1-SNAPSHOT")
      }

      it("greater bound") {
        val version = SnapshotVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = false, hashAndCount1, 1)
        val bound = LowerBound(2, 0, 0)
        val result = version.lowerBound(bound, hashAndCount1)
        assert(result.toString === s"2.0.0-1-$hash1-SNAPSHOT")
      }

      it("version is dirty") {
        val version = SnapshotVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = true, hashAndCount1, 1)
        val bound = LowerBound(2, 0, 0)
        val result = version.lowerBound(bound, hashAndCount1)
        assert(result.toString === s"2.0.0-1-$hash1-dirty-SNAPSHOT")
      }

      it("bound is dirty") {
        val version = SnapshotVersion(1, 2, 3, SemVerIdentifierList.empty, isDirty = false, hashAndCount1, 1)
        val bound = LowerBound(2, 0, 0)
        val result = version.lowerBound(bound, hashAndCount1)
        assert(result.toString === s"2.0.0-1-$hash1-SNAPSHOT")
      }

      it("identifiers") {
        val version = SnapshotVersion(1, 0, 0, Seq("rc.0"), isDirty = false, hashAndCount1, 1)
        val bound = LowerBound(2, 0, 0)
        val result = version.lowerBound(bound, hashAndCount1)
        assert(result === SnapshotVersion(2, 0, 0, SemVerIdentifierList.empty, false, hashAndCount1, 1))
      }
    }
  }

  implicit def hashAndCountAsHash(hc: HashAndCount): HashSemVerIdentifier = hc.hash
}
Example 14
Source File: CirceSuite.scala From circe-jackson with Apache License 2.0
package io.circe.jackson

import cats.instances.AllInstances
import cats.syntax.{ AllSyntax, EitherOps }
import io.circe.Json
import io.circe.testing.{ ArbitraryInstances, EqInstances }
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import org.typelevel.discipline.scalatest.FlatSpecDiscipline

import scala.language.implicitConversions

trait CirceSuite
    extends AnyFlatSpec
    with FlatSpecDiscipline
    with ScalaCheckDrivenPropertyChecks
    with AllInstances
    with AllSyntax
    with ArbitraryInstances
    with EqInstances {

  override def convertToEqualizer[T](left: T): Equalizer[T] =
    sys.error("Intentionally ambiguous implicit for Equalizer")

  implicit def prioritizedCatsSyntaxEither[A, B](eab: Either[A, B]): EitherOps[A, B] = new EitherOps(eab)

  val glossary: Json = Json.obj(
    "glossary" -> Json.obj(
      "title" -> Json.fromString("example glossary"),
      "GlossDiv" -> Json.obj(
        "title" -> Json.fromString("S"),
        "GlossList" -> Json.obj(
          "GlossEntry" -> Json.obj(
            "ID" -> Json.fromString("SGML"),
            "SortAs" -> Json.fromString("SGML"),
            "GlossTerm" -> Json.fromString("Standard Generalized Markup Language"),
            "Acronym" -> Json.fromString("SGML"),
            "Abbrev" -> Json.fromString("ISO 8879:1986"),
            "GlossDef" -> Json.obj(
              "para" -> Json.fromString(
                "A meta-markup language, used to create markup languages such as DocBook."
              ),
              "GlossSeeAlso" -> Json.arr(Json.fromString("GML"), Json.fromString("XML"))
            ),
            "GlossSee" -> Json.fromString("markup")
          )
        )
      )
    )
  )
}
Example 15
Source File: Cards.scala From Scalaprof with GNU General Public License v2.0
package edu.neu.coe.csye._7200.enums.enumeration

object Rank extends Enumeration {
  type Rank = Value
  val Deuce, Trey, Four, Five, Six, Seven, Eight, Nine, Ten, Knave, Queen, King, Ace = Value

  class RankValue(rank: Value) {
    def isSpot = !isHonor
    def isHonor = rank match {
      case Ace | King | Queen | Knave | Ten => true
      case _ => false
    }
  }

  implicit def value2RankValue(rank: Value) = new RankValue(rank)
}

object Suit extends Enumeration {
  type Suit = Value
  val Clubs, Diamonds, Hearts, Spades = Value

  class SuitValue(suit: Value) {
    def isRed = !isBlack
    def isBlack = suit match {
      case Clubs | Spades => true
      case _ => false
    }
  }

  implicit def value2SuitValue(suit: Value) = new SuitValue(suit)
}

import edu.neu.coe.csye._7200.enums.enumeration.Rank._
import edu.neu.coe.csye._7200.enums.enumeration.Suit._

import scala.language.implicitConversions

case class Card(rank: Rank, suit: Suit)
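The two value2... conversions are classic "enrich my library" helpers: they bolt the isHonor/isBlack predicates onto plain Enumeration values. Sketch:

import edu.neu.coe.csye._7200.enums.enumeration.Rank._
import edu.neu.coe.csye._7200.enums.enumeration.Suit._

Ace.isHonor    // true: Ace is first converted to a RankValue
Four.isSpot    // true
Spades.isBlack // true
Hearts.isRed   // true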
Example 16
Source File: thunk.scala From Scala-High-Performance-Programming with MIT License
package highperfscala.free

import scala.language.{higherKinds, implicitConversions, postfixOps}
import scalaz.{-\/, Free, Functor, \/, \/-}

case class LimitMs(value: Long) extends AnyVal

sealed trait Thunk[A]

case class Timed[A](
  whenActive: () => A,
  whenExpired: () => A,
  limit: LimitMs) extends Thunk[A]

case class StartProcessing[A](
  whenActive: BboUpdated => A,
  whenExpired: BboUpdated => A,
  limit: LimitMs) extends Thunk[A]

case class TradingDecision[A](
  makeDecision: TradingStrategy => A) extends Thunk[A]

object Thunk {

  implicit val functor: Functor[Thunk] = new Functor[Thunk] {
    def map[A, B](t: Thunk[A])(f: (A) => B): Thunk[B] = t match {
      case Timed(whenActive, whenExpired, limit) =>
        Timed(() => f(whenActive()), () => f(whenExpired()), limit)
      case StartProcessing(whenActive, whenExpired, limit) =>
        StartProcessing(c => f(whenActive(c)), c => f(whenExpired(c)), limit)
      case TradingDecision(makeDecision) =>
        TradingDecision((makeDecision.apply _).andThen(f))
    }
  }

  def timed[L, R](f: () => R, exp: () => L, limit: LimitMs): Free[Thunk, L \/ R] =
    Free.liftF(Timed(() => \/-(f()), () => -\/(exp()), limit))

  def startProcessing[L, R](f: BboUpdated => R, exp: BboUpdated => L, limit: LimitMs): Free[Thunk, L \/ R] =
    Free.liftF(StartProcessing(f.andThen(\/-(_)), exp.andThen(-\/(_)), limit))

  def tradingDecision[L, R](f: TradingStrategy => R): Free[Thunk, L \/ R] =
    Free.liftF(TradingDecision((f.apply _).andThen(\/-(_))))
}
Example 17
Source File: PlayJsonSupport.scala From akka-cluster-manager with MIT License
package io.orkestra.cluster.management

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.HttpCharsets
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.stream.Materializer
import play.api.libs.json._

import scala.concurrent.ExecutionContext
import scala.language.implicitConversions

trait PlayJsonSupport {

  type Printer = (JsValue ⇒ String)

  def read[T](jsValue: JsValue)(implicit reads: Reads[T]): T = {
    reads.reads(jsValue) match {
      case s: JsSuccess[T] ⇒ s.get
      case e: JsError ⇒ throw JsResultException(e.errors)
    }
  }

  implicit def playJsonUnmarshallerConverter[T](reads: Reads[T])(implicit ec: ExecutionContext, mat: Materializer): FromEntityUnmarshaller[T] =
    playJsonUnmarshaller(reads, ec, mat)

  implicit def playJsonUnmarshaller[T](implicit reads: Reads[T], ec: ExecutionContext, mat: Materializer): FromEntityUnmarshaller[T] =
    playJsValueUnmarshaller.map(read[T])

  implicit def playJsValueUnmarshaller(implicit ec: ExecutionContext, mat: Materializer): FromEntityUnmarshaller[JsValue] =
    Unmarshaller.byteStringUnmarshaller.forContentTypes(`application/json`).mapWithCharset { (data, charset) ⇒
      if (charset == HttpCharsets.`UTF-8`) Json.parse(data.toArray)
      else Json.parse(data.decodeString(charset.nioCharset.name)) // FIXME: identify charset by instance, not by name!
    }

  implicit def playJsonMarshallerConverter[T](writes: Writes[T])(implicit printer: Printer = Json.prettyPrint, ec: ExecutionContext): ToEntityMarshaller[T] =
    playJsonMarshaller[T](writes, printer, ec)

  implicit def playJsonMarshaller[T](implicit writes: Writes[T], printer: Printer = Json.prettyPrint, ec: ExecutionContext): ToEntityMarshaller[T] =
    playJsValueMarshaller[T].compose(writes.writes)

  implicit def playJsValueMarshaller[T](implicit writes: Writes[T], printer: Printer = Json.prettyPrint, ec: ExecutionContext): ToEntityMarshaller[JsValue] =
    Marshaller.StringMarshaller.wrap(`application/json`)(printer)
}

object PlayJsonSupport extends PlayJsonSupport
Example 18
Source File: DSL.scala From scruid with Apache License 2.0

package ing.wbaa.druid.dql

import scala.language.implicitConversions

import ing.wbaa.druid.definitions.ArithmeticFunction
import ing.wbaa.druid.dql.expressions._

object DSL extends FilteringExpressionOps with ExtractionFnOps with AggregationOps with PostAggregationOps {

  def dim(name: String): Dim = Dim(name)

  implicit class StringOps(val value: String) extends AnyVal {
    def ===(s: Dim): FilteringExpression = s === value
    def =!=(s: Dim): FilteringExpression = s =!= value
  }

  implicit class NumOps(val value: Double) extends AnyVal {

    @inline
    private def arithmeticPostAgg(s: Dim, fn: ArithmeticFunction): PostAggregationExpression =
      ArithmeticPostAgg(
        new ConstantPostAgg(value),
        new FieldAccessPostAgg(s.name),
        fn = fn
      )

    def ===(s: Dim): FilteringExpression = s === value
    def =!=(s: Dim): FilteringExpression = s =!= value

    // comparisons are flipped because the numeric literal is on the left-hand
    // side: `1.0 > dim` denotes the same filter as `dim < 1.0`
    def >(s: Dim): FilteringExpression = s < value
    def >=(s: Dim): FilteringExpression = s <= value
    def <(s: Dim): FilteringExpression = s > value
    def <=(s: Dim): FilteringExpression = s >= value

    def +(s: Symbol): PostAggregationExpression = arithmeticPostAgg(s, ArithmeticFunction.PLUS)
    def -(s: Symbol): PostAggregationExpression = arithmeticPostAgg(s, ArithmeticFunction.MINUS)
    def *(s: Symbol): PostAggregationExpression = arithmeticPostAgg(s, ArithmeticFunction.MULT)
    def /(s: Symbol): PostAggregationExpression = arithmeticPostAgg(s, ArithmeticFunction.DIV)
    def quotient(s: Symbol): PostAggregationExpression = arithmeticPostAgg(s, ArithmeticFunction.QUOT)
  }
}
Example 19
Source File: ParameterConversions.scala From scruid with Apache License 2.0
package ing.wbaa.druid.sql

import java.sql.Timestamp
import java.time.{ Instant, LocalDate, LocalDateTime }

import scala.language.implicitConversions

import ing.wbaa.druid.{ DruidConfig, SQLQueryParameter, SQLQueryParameterType }

trait ParameterConversions {

  implicit def char2Param(v: Char): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Char, v.toString)

  implicit def string2Param(v: String): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Varchar, v)

  implicit def byte2Param(v: Byte): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Tinyint, v.toString)

  implicit def short2Param(v: Short): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Smallint, v.toString)

  implicit def int2Param(v: Int): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Integer, v.toString)

  implicit def long2Param(v: Long): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Bigint, v.toString)

  implicit def float2Param(v: Float): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Float, v.toString)

  implicit def double2Param(v: Double): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Double, v.toString)

  implicit def boolean2Param(v: Boolean): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Boolean, v.toString)

  implicit def localDate2Param(v: LocalDate)(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Date, v.format(config.FormatterDate))

  implicit def localDateTime2Param(
    v: LocalDateTime
  )(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, v.format(config.FormatterDateTime))

  implicit def timestamp2Param(v: Timestamp)(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, config.FormatterDateTime.format(v.toInstant))

  implicit def instant2Param(
    v: Instant
  )(implicit config: DruidConfig = DruidConfig.DefaultConfig): SQLQueryParameter =
    SQLQueryParameter(SQLQueryParameterType.Timestamp, config.FormatterDateTime.format(v))
}
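Each conversion pairs a JVM type with the corresponding Druid SQL parameter type, so ordinary Scala values can be handed to a parameterized SQL query directly. A minimal sketch, assuming an object that mixes in the trait to bring the conversions into scope:

import ing.wbaa.druid.SQLQueryParameter

object Conversions extends ing.wbaa.druid.sql.ParameterConversions
import Conversions._

val a: SQLQueryParameter = 42     // Integer parameter
val b: SQLQueryParameter = "foo"  // Varchar parameter
val c: SQLQueryParameter = true   // Boolean parameter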
Example 20
Source File: IntellijPluginInstallerTestBase.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import java.nio.file.Path

import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.ConsoleLogger
import org.jetbrains.sbtidea.download.api.InstallContext
import org.jetbrains.sbtidea.download.idea.IdeaMock
import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.download.jbr.JbrDependency
import org.scalatest.{FunSuite, Matchers}
import org.jetbrains.sbtidea.pathToPathExt
import sbt._

import scala.language.implicitConversions

trait IntellijPluginInstallerTestBase extends FunSuite with Matchers with IdeaMock with PluginMock with ConsoleLogger {

  protected lazy val ideaRoot: Path = installIdeaMock

  protected val pluginsRoot: Path = ideaRoot / "plugins"

  protected val ideaBuild: BuildInfo =
    BuildInfo(IDEA_VERSION, IntelliJPlatform.IdeaUltimate, Some(JbrDependency.VERSION_AUTO))

  protected implicit val defaultBuildInfo: BuildInfo = IDEA_BUILDINFO

  protected implicit val localRegistry: LocalPluginRegistryApi = new LocalPluginRegistry(ideaRoot)
  protected implicit val repoAPI: PluginRepoApi = new PluginRepoUtils

  protected def createInstaller(implicit buildInfo: BuildInfo = IDEA_BUILDINFO): RepoPluginInstaller =
    new RepoPluginInstaller(buildInfo)

  protected implicit def plugin2PluginDep(pl: IntellijPlugin)(implicit buildInfo: BuildInfo): PluginDependency =
    PluginDependency(pl, buildInfo)

  protected implicit def plugin2PluginArt(pl: IntellijPlugin): RemotePluginArtifact =
    RemotePluginArtifact(pl, new URL("file:"))

  protected implicit def installContext: InstallContext =
    InstallContext(ideaRoot, ideaRoot.getParent)
}
Example 21
Source File: IntellijPluginResolverTestBase.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import java.nio.file.{Path, Paths}

import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.plugin.PluginDescriptor.Dependency
import org.jetbrains.sbtidea.{Keys, download, pathToPathExt}
import org.scalatest.Inside
import sbt._

import scala.language.implicitConversions

abstract class IntellijPluginResolverTestBase extends IntellijPluginInstallerTestBase with Inside {

  protected val pluginA: PluginDescriptor = PluginDescriptor("org.A", "A - bundled", "0", "", "")
  protected val pluginB: PluginDescriptor = PluginDescriptor("org.B", "B - remote", "0", "", "")
  protected val pluginC: PluginDescriptor = PluginDescriptor("org.C", "C - remote", "0", "", "",
    Seq(Dependency("org.A", optional = true), Dependency("org.B", optional = false)))
  protected val pluginD: PluginDescriptor = PluginDescriptor("org.D", "D - remote cyclic", "0", "", "",
    Seq(Dependency("org.E", optional = false), Dependency("org.A", optional = true)))
  protected val pluginE: PluginDescriptor = PluginDescriptor("org.E", "C - remote cyclic", "0", "", "",
    Seq(Dependency("org.D", optional = false), Dependency("org.C", optional = true)))

  protected val descriptorMap: Map[String, PluginDescriptor] =
    Seq(pluginA, pluginB, pluginC, pluginD, pluginE).map(p => p.id -> p).toMap

  protected implicit def descriptor2Plugin(descriptor: PluginDescriptor): PluginDependency =
    PluginDependency(Keys.IntellijPlugin.Id(descriptor.id, None, None),
      IDEA_BUILDINFO,
      descriptor.dependsOn.map(p => plugin2PluginDep(p.id.toPlugin)))

  override protected implicit val localRegistry: LocalPluginRegistryApi = new LocalPluginRegistryApi {

    override def getPluginDescriptor(ideaPlugin: Keys.IntellijPlugin): Either[String, PluginDescriptor] =
      ideaPlugin match {
        case IntellijPlugin.Url(_) =>
          throw new IllegalArgumentException("url plugin not supported")
        case IntellijPlugin.Id(id, _, _) =>
          descriptorMap.get(id).filterNot(_.name.contains("remote")).toRight("plugin is remote")
        case IntellijPlugin.BundledFolder(name) =>
          descriptorMap.get(name).filterNot(_.name.contains("remote")).toRight("plugin is remote")
      }

    override def isPluginInstalled(ideaPlugin: Keys.IntellijPlugin): Boolean =
      ideaPlugin match {
        case IntellijPlugin.Url(_) => false
        case IntellijPlugin.Id(id, _, _) => descriptorMap.get(id).exists(_.name.contains("bundled"))
        case IntellijPlugin.BundledFolder(name) => descriptorMap.get(name).exists(_.name.contains("bundled"))
      }

    override def getAllDescriptors: Seq[PluginDescriptor] = descriptorMap.values.toSeq

    override def markPluginInstalled(ideaPlugin: Keys.IntellijPlugin, to: Path): Unit = ()

    override def getInstalledPluginRoot(ideaPlugin: Keys.IntellijPlugin): Path = Paths.get("INVALID")
  }

  override protected implicit val repoAPI: PluginRepoApi = new PluginRepoApi {

    override def getRemotePluginXmlDescriptor(idea: download.BuildInfo, pluginId: String, channel: Option[String]): Either[Throwable, PluginDescriptor] =
      descriptorMap.get(pluginId).filter(_.name.contains("remote")).toRight(null)

    override def getPluginDownloadURL(idea: download.BuildInfo, pluginInfo: Keys.IntellijPlugin.Id): URL =
      new URL("file:INVALID")

    override def getLatestPluginVersion(idea: download.BuildInfo, pluginId: String, channel: Option[String]): Either[Throwable, String] =
      throw new IllegalArgumentException
  }
}
Example 22
Source File: IntellijPluginResolverTest.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import org.jetbrains.sbtidea.CapturingLogger._
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.pathToPathExt
import sbt._

import scala.language.implicitConversions

class IntellijPluginResolverTest extends IntellijPluginResolverTestBase {

  test("plugin resolver doesn't resolve fake plugin") {
    val fakePlugin = "org.myFake.plugin:0.999:trunk".toPlugin
    val messages = captureLog(new PluginResolver(resolveSettings = fakePlugin.resolveSettings).resolve(fakePlugin) shouldBe empty)
    messages should contain ("Failed to resolve PluginDependency(org.myFake.plugin): null")
  }

  test("transitive plugin dependencies are resolved") {
    val res = new PluginResolver(resolveSettings = pluginC.plugin.resolveSettings).resolve(pluginC)
    inside(res) {
      case RemotePluginArtifact(c, _) :: LocalPlugin(a, _, _) :: RemotePluginArtifact(b, _) :: Nil =>
        c shouldBe descriptor2Plugin(pluginC)
        a shouldBe descriptor2Plugin(pluginA)
        b shouldBe descriptor2Plugin(pluginB)
    }
  }

  test("cyclic dependencies are detected") {
    val resolver = new PluginResolver(resolveSettings = pluginE.plugin.resolveSettings)
    val (messages, result) = captureLogAndValue(resolver.resolve(pluginE))
    messages shouldBe Seq("Circular plugin dependency detected: PluginDependency(org.E) already processed")
    result.size shouldBe 3
  }

  test("plugin exclude rules work") {
    val newResolveSettings = IntellijPlugin.Settings(excludedIds = Set(pluginA.id))
    val res = new PluginResolver(resolveSettings = newResolveSettings).resolve(pluginC)
    inside(res) {
      case RemotePluginArtifact(c, _) :: RemotePluginArtifact(b, _) :: Nil =>
        c shouldBe descriptor2Plugin(pluginC)
        // a shouldBe descriptor2Plugin(pluginA)
        b shouldBe descriptor2Plugin(pluginB)
    }
  }

  test("disable transitive resolution") {
    val newResolveSettings = IntellijPlugin.Settings(transitive = false)
    val res = new PluginResolver(resolveSettings = newResolveSettings).resolve(pluginC)
    inside(res) {
      case RemotePluginArtifact(c, _) :: Nil =>
        c shouldBe descriptor2Plugin(pluginC)
    }
  }

  test("disable optional plugins") {
    val newResolveSettings = IntellijPlugin.Settings(optionalDeps = false)
    val res = new PluginResolver(resolveSettings = newResolveSettings).resolve(pluginC)
    inside(res) {
      case RemotePluginArtifact(c, _) :: RemotePluginArtifact(b, _) :: Nil =>
        c shouldBe descriptor2Plugin(pluginC)
        // a shouldBe descriptor2Plugin(pluginA)
        b shouldBe descriptor2Plugin(pluginB)
    }
  }
}
Example 23
Source File: JbIdeaRepoArtifactResolverTest.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.idea

import org.jetbrains.sbtidea.ConsoleLogger
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.BuildInfo
import org.scalatest.{FunSuite, Matchers}

import scala.language.implicitConversions

class JbIdeaRepoArtifactResolverTest extends FunSuite with Matchers with IdeaMock with ConsoleLogger {

  private def createResolver: IJRepoIdeaResolver = new IJRepoIdeaResolver

  private implicit def buildInfo2Dep(buildInfo: BuildInfo): IdeaDependency = IdeaDependency(buildInfo)

  test("latest eap is resolved") {
    val resolver = createResolver
    val result = resolver.resolve(BuildInfo("LATEST-EAP-SNAPSHOT", IntelliJPlatform.IdeaCommunity, None))
    result should not be empty
  }

  test("latest 192 release is resolved") {
    val resolver = createResolver
    val result = resolver.resolve(BuildInfo("192.6603.28", IntelliJPlatform.IdeaCommunity, None))
    result should not be empty
  }

  test("latest 191 release is resolved") {
    val resolver = createResolver
    val result = resolver.resolve(BuildInfo("191.7479.19", IntelliJPlatform.IdeaCommunity, None))
    result should not be empty
  }
}
Example 24
Source File: SbtPackageProjectData.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.packaging.structure.sbtImpl

import java.io.File

import org.jetbrains.sbtidea.packaging.PackagingKeys.ExcludeFilter._
import org.jetbrains.sbtidea.packaging.PackagingKeys.{PackagingMethod, ShadePattern}
import org.jetbrains.sbtidea.structure.sbtImpl.CommonSbtProjectData
import sbt.Def.Classpath
import sbt._

import scala.language.implicitConversions

case class SbtPackageProjectData(thisProject: ProjectRef,
                                 cp: Classpath,
                                 definedDeps: Seq[ModuleID],
                                 additionalProjects: Seq[Project],
                                 assembleLibraries: Boolean,
                                 productDirs: Seq[File],
                                 report: UpdateReport,
                                 libMapping: Seq[(ModuleID, Option[String])],
                                 additionalMappings: Seq[(File, String)],
                                 packageMethod: PackagingMethod,
                                 shadePatterns: Seq[ShadePattern],
                                 excludeFilter: ExcludeFilter) extends CommonSbtProjectData
Example 25
Source File: SbtPackagingStructureExtractor.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.packaging.structure.sbtImpl

import org.jetbrains.sbtidea.PluginLogger
import org.jetbrains.sbtidea.packaging.{PackagingKeys, structure}
import org.jetbrains.sbtidea.packaging.structure.ProjectPackagingOptions
import org.jetbrains.sbtidea.structure.sbtImpl._
import sbt._
import sbt.jetbrains.ideaPlugin.apiAdapter._

import scala.language.implicitConversions

class SbtPackagingStructureExtractor(override val rootProject: ProjectRef,
                                     override val projectsData: Seq[SbtPackageProjectData],
                                     override val buildDependencies: BuildDependencies,
                                     _log: PluginLogger) extends SbtProjectStructureExtractorBase {

  override type ProjectDataType = SbtPackageProjectData
  override type NodeType = SbtPackagedProjectNodeImpl

  override implicit val log: PluginLogger = _log

  private def validateProjectData(data: SbtPackageProjectData): Unit = {
    val unresolvedRefs = data.additionalProjects.map(x => x -> findProjectRef(x)).filter(_._2.isEmpty)
    if (unresolvedRefs.nonEmpty)
      throw new SbtProjectExtractException(s"Failed to resolve refs for projects: $unresolvedRefs")

    val unmappedProjects = data.additionalProjects.flatMap(findProjectRef).map(x => x -> projectCache.get(x)).filter(_._2.isEmpty)
    if (unmappedProjects.nonEmpty)
      throw new SbtProjectExtractException(s"No stubs for project refs found: ${unmappedProjects.map(_._1)}")
  }

  override protected def collectAdditionalProjects(data: SbtPackageProjectData, direct: Seq[ProjectRef]): Seq[ProjectRef] =
    data.additionalProjects.flatMap(findProjectRef).foldLeft(direct) { case (q, r) => topoSortRefs(r, q) }

  private def collectPackagingOptions(data: SbtPackageProjectData): ProjectPackagingOptions = {
    implicit val scalaVersion: ProjectScalaVersion =
      ProjectScalaVersion(data.definedDeps.find(_.name == "scala-library"))

    validateProjectData(data)

    SbtProjectPackagingOptionsImpl(
      data.packageMethod,
      data.libMapping.map(x => x._1.key -> x._2),
      data.additionalMappings,
      data.shadePatterns,
      data.excludeFilter,
      data.productDirs,
      data.assembleLibraries,
      data.additionalProjects.map(x => projectCache(findProjectRef(x).get))
    )
  }

  override def buildStub(data: SbtPackageProjectData): SbtPackagedProjectNodeImpl =
    SbtPackagedProjectNodeImpl(data.thisProject, null, null, null, null)

  override def updateNode(node: SbtPackagedProjectNodeImpl, data: SbtPackageProjectData): SbtPackagedProjectNodeImpl = {
    val options = collectPackagingOptions(data)
    val children = collectChildren(node, data)
    val parents = collectParents(node, data)
    val libs = collectLibraries(data)
    node.packagingOptions = options
    node.children = children
    node.parents = parents
    node.libs = libs
    node
  }

  implicit def keys2Structure(p: PackagingKeys.PackagingMethod): structure.PackagingMethod = p match {
    case PackagingKeys.PackagingMethod.Skip() =>
      structure.PackagingMethod.Skip()
    case PackagingKeys.PackagingMethod.MergeIntoParent() =>
      structure.PackagingMethod.MergeIntoParent()
    case PackagingKeys.PackagingMethod.DepsOnly(targetPath) =>
      structure.PackagingMethod.DepsOnly(targetPath)
    case PackagingKeys.PackagingMethod.Standalone(targetPath, static) =>
      structure.PackagingMethod.Standalone(targetPath, static)
    case PackagingKeys.PackagingMethod.MergeIntoOther(project) =>
      structure.PackagingMethod.MergeIntoOther(findProjectRef(project).map(projectCache).getOrElse(???))
  }
}
Example 26
Source File: package.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.structure

import sbt._

import scala.language.implicitConversions

package object sbtImpl {

  case class ProjectScalaVersion(libModule: Option[ModuleID]) {
    def isDefined: Boolean = libModule.isDefined
    def str: String = libModule.map(_.revision).getOrElse("")
  }

  implicit class ModuleIdExt(val moduleId: ModuleID) extends AnyVal {

    def key(implicit scalaVersion: ProjectScalaVersion): ModuleKey = {
      val versionSuffix = moduleId.crossVersion match {
        case _: CrossVersion.Binary if scalaVersion.isDefined =>
          "_" + CrossVersion.binaryScalaVersion(scalaVersion.str)
        case _ => ""
      }
      ModuleKeyImpl(
        moduleId.organization % (moduleId.name + versionSuffix) % moduleId.revision,
        moduleId.extraAttributes
          .map { case (k, v) => k.stripPrefix("e:") -> v }
          .filter { case (k, _) => k == "scalaVersion" || k == "sbtVersion" })
    }
  }

  class SbtProjectExtractException(message: String) extends Exception(message)

  private[sbtImpl] def extractProjectName(project: ProjectReference): String = {
    val str = project.toString
    val commaIdx = str.indexOf(',')
    str.substring(commaIdx + 1, str.length - 1)
  }
}
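The ModuleIdExt enrichment derives a cross-version-aware ModuleKey from a plain sbt ModuleID. A sketch of how the suffix logic plays out (the module coordinates are made up):

import org.jetbrains.sbtidea.structure.sbtImpl._
import sbt._

implicit val scalaVersion: ProjectScalaVersion =
  ProjectScalaVersion(Some("org.scala-lang" % "scala-library" % "2.12.10"))

// %% yields CrossVersion.Binary, so .key appends the _2.12 binary suffix
val key = ("org.typelevel" %% "cats-core" % "2.0.0").key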
Example 27
Source File: ScalaKinesisProducer.scala From kpl-scala with Apache License 2.0
package com.contxt.kinesis

import com.amazonaws.services.kinesis.producer.{ KinesisProducer, KinesisProducerConfiguration, UserRecordResult }
import com.google.common.util.concurrent.ListenableFuture
import com.typesafe.config.{ Config, ConfigFactory }
import java.nio.ByteBuffer
import scala.concurrent._
import scala.language.implicitConversions
import scala.util.Try
import collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global

// NOTE: the trait declaration was truncated in the original listing; only its
// last abstract member survived. The remaining members (streamId, send) can be
// inferred from the implementation below.
trait ScalaKinesisProducer {
  def shutdown(): Future[Unit]
}

object ScalaKinesisProducer {

  def apply(
    streamName: String,
    kplConfig: KinesisProducerConfiguration,
    config: Config = ConfigFactory.load()
  ): ScalaKinesisProducer = {
    val producerStats = ProducerStats.getInstance(config)
    ScalaKinesisProducer(streamName, kplConfig, producerStats)
  }

  def apply(
    streamName: String,
    kplConfig: KinesisProducerConfiguration,
    producerStats: ProducerStats
  ): ScalaKinesisProducer = {
    val streamId = StreamId(kplConfig.getRegion, streamName)
    val producer = new KinesisProducer(kplConfig)
    new ScalaKinesisProducerImpl(streamId, producer, producerStats)
  }

  private[kinesis] implicit def listenableToScalaFuture[A](listenable: ListenableFuture[A]): Future[A] = {
    val promise = Promise[A]
    val callback = new Runnable {
      override def run(): Unit = promise.tryComplete(Try(listenable.get()))
    }
    listenable.addListener(callback, ExecutionContext.global)
    promise.future
  }
}

private[kinesis] class ScalaKinesisProducerImpl(
  val streamId: StreamId,
  private val producer: KinesisProducer,
  private val stats: ProducerStats
) extends ScalaKinesisProducer {

  import ScalaKinesisProducer.listenableToScalaFuture

  stats.reportInitialization(streamId)

  def send(partitionKey: String, data: ByteBuffer, explicitHashKey: Option[String]): Future[UserRecordResult] = {
    stats.trackSend(streamId, data.remaining) {
      producer.addUserRecord(streamId.streamName, partitionKey, explicitHashKey.orNull, data).map { result =>
        if (!result.isSuccessful) throwSendFailedException(result) else result
      }
    }
  }

  def shutdown(): Future[Unit] = shutdownOnce

  private lazy val shutdownOnce: Future[Unit] = {
    val allFlushedFuture = flushAll()
    val shutdownPromise = Promise[Unit]
    allFlushedFuture.onComplete { _ =>
      shutdownPromise.completeWith(destroyProducer())
    }
    val combinedFuture = allFlushedFuture.zip(shutdownPromise.future).map(_ => ())
    combinedFuture.onComplete(_ => stats.reportShutdown(streamId))
    combinedFuture
  }

  private def throwSendFailedException(result: UserRecordResult): Nothing = {
    val attemptCount = result.getAttempts.size
    val errorMessage = result.getAttempts.asScala.lastOption.map(_.getErrorMessage)
    throw new RuntimeException(
      s"Sending a record to $streamId failed after $attemptCount attempts, last error message: $errorMessage."
    )
  }

  private def flushAll(): Future[Unit] = {
    Future {
      blocking {
        producer.flushSync()
      }
    }
  }

  private def destroyProducer(): Future[Unit] = {
    Future {
      blocking {
        producer.destroy()
      }
    }
  }
}
Example 28
Source File: ExecutorContext.scala From sona with Apache License 2.0
package com.tencent.angel.sona.core

import com.tencent.angel.mlcore.conf.SharedConf
import com.tencent.angel.psagent.PSAgent
import org.apache.spark.internal.CompatibleLogging
import org.apache.spark.sql.SPKSQLUtils

import scala.language.implicitConversions

case class ExecutorContext(conf: SharedConf, numTask: Int)
  extends PSAgentContext(conf) with CompatibleLogging with Serializable {

  @transient override lazy val sparkWorkerContext: SparkWorkerContext = {
    if (psAgent == null) {
      throw new Exception("Pls. startAngel first!")
    }
    SparkWorkerContext(psAgent)
  }
}

object ExecutorContext {
  @transient private var graphModelPool: GraphModelPool = _
  @transient private var psAgent: PSAgent = _

  def getPSAgent(exeCtx: ExecutorContext): PSAgent = synchronized {
    while (psAgent == null) {
      SPKSQLUtils.registerUDT()
      psAgent = exeCtx.createAndInitPSAgent
    }
    com.tencent.angel.psagent.PSAgentContext.get().getPsAgent.refreshMatrixInfo()
    psAgent
  }

  def stopPSAgent(): Unit = synchronized {
    while (psAgent != null) {
      psAgent.stop()
      psAgent = null
    }
  }

  private def checkGraphModelPool(exeCtx: ExecutorContext): Unit = {
    getPSAgent(exeCtx)
    if (graphModelPool == null) {
      graphModelPool = new GraphModelPool(exeCtx.sparkWorkerContext, exeCtx.numTask)
    }
  }

  def getPSAgent: PSAgent = synchronized {
    if (psAgent != null) {
      psAgent
    } else {
      throw new Exception("psAgent is empty, pls. init first!")
    }
  }

  implicit def toGraphModelPool(exeCtx: ExecutorContext): GraphModelPool = synchronized {
    checkGraphModelPool(exeCtx)
    graphModelPool
  }
}
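toGraphModelPool lets an ExecutorContext stand in for a GraphModelPool, initializing the shared pool on first use. A sketch (obtaining a real context requires a running Angel/Spark job, so it is stubbed here):

import com.tencent.angel.sona.core._

val exeCtx: ExecutorContext = ??? // provided by the running job in practice
val pool: GraphModelPool = exeCtx // implicit conversion; initializes the pool lazily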
Example 29
Source File: SlackSearch.scala From slack-client with MIT License
package com.kifi.slack.models

import org.joda.time.LocalDate
import play.api.libs.json.{Json, Reads}

import scala.language.implicitConversions

case class SlackSearchRequest(query: SlackSearchRequest.Query, optional: SlackSearchRequest.Param*)

object SlackSearchRequest {

  sealed abstract class Param(val name: String, val value: Option[String])

  case class Query(query: String) extends Param("query", Some(query))

  object Query {
    val trivial = Query("")

    def apply(queries: Option[Query]*): Query = Query(queries.flatten.map(_.query).mkString(" "))

    def in(channelName: SlackChannelName) = Query(s"in:#${channelName.value.stripPrefix("#").stripPrefix("@")}")
    def from(username: SlackUsername) = Query(s"from:${username.value}")
    def before(date: LocalDate) = Query(s"before:$date")
    def after(date: LocalDate) = Query(s"after:$date")
    val hasLink = Query(s"has:link")

    implicit val reads = Reads.of[String].map(Query(_))
  }

  sealed abstract class Sort(sort: String) extends Param("sort", Some(sort))
  object Sort {
    case object ByScore extends Sort("score")
    case object ByTimestamp extends Sort("timestamp")
  }

  sealed abstract class SortDirection(dir: String) extends Param("sort_dir", Some(dir))
  object SortDirection {
    case object Descending extends SortDirection("desc")
    case object Ascending extends SortDirection("asc")
  }

  object Highlight extends Param("highlight", Some("1"))

  case class Page(page: Int) extends Param("page", Some(page.toString))
  object Page {
    val max = 100
  }

  case class PageSize(count: Int) extends Param("count", Some(count.toString))
  object PageSize {
    val max = 1000
  }
}

case class SlackSearchResponse(query: SlackSearchRequest.Query, messages: SlackSearchResponse.Messages)

object SlackSearchResponse {
  val trivial = SlackSearchResponse(SlackSearchRequest.Query.trivial, SlackSearchResponse.Messages.empty)

  case class Paging(count: Int, total: Int, page: Int, pages: Int)
  object Paging {
    val empty = Paging(0, 0, 0, 0)
    implicit val reads = Json.reads[Paging]
  }

  case class Messages(total: Int, paging: Paging, matches: Seq[SlackMessage])
  object Messages {
    val empty = Messages(0, Paging.empty, Seq.empty)
    implicit val reads = Json.reads[Messages]
  }

  implicit val reads = Json.reads[SlackSearchResponse]
}
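Query.apply(queries: Option[Query]*) composes sub-queries by simple string concatenation, mirroring Slack's search syntax. Sketch (assuming SlackUsername is a simple value wrapper, as the .value usage above suggests):

import com.kifi.slack.models._
import org.joda.time.LocalDate

val q = SlackSearchRequest.Query(
  Some(SlackSearchRequest.Query.from(SlackUsername("alice"))),
  Some(SlackSearchRequest.Query.after(new LocalDate(2024, 1, 1))),
  Some(SlackSearchRequest.Query.hasLink)
)
val req = SlackSearchRequest(q, SlackSearchRequest.Sort.ByTimestamp, SlackSearchRequest.SortDirection.Descending)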
Example 30
Source File: WithProps.scala From scalajs-react-bridge with MIT License
package com.payalabs.scalajs.react.bridge

import japgolly.scalajs.react.vdom.{TagMod, VdomElement, VdomNode}

import scala.scalajs.js
import scala.language.implicitConversions

class WithProps(jsComponent: JsComponentType, jsProps: js.Object) {

  def apply(attrAndChildren: TagMod*): WithPropsAndTagsMods = {
    val (props, children) = extractPropsAndChildren(attrAndChildren)
    val mergedProps = mergeJSObjects(props.asInstanceOf[js.Dynamic], jsProps.asInstanceOf[js.Dynamic])
    new WithPropsAndTagsMods(jsComponent, mergedProps.asInstanceOf[js.Object], children)
  }

  def apply(children: VdomNode*): VdomElement = {
    apply(List.empty[TagMod]: _*)(children: _*)
  }

  private def mergeJSObjects(objs: js.Dynamic*): js.Dynamic = {
    val result = js.Dictionary.empty[Any]
    for (source <- objs) {
      for ((key, value) <- source.asInstanceOf[js.Dictionary[Any]])
        result(key) = value
    }
    result.asInstanceOf[js.Dynamic]
  }
}

object WithProps {
  implicit def toVdomNode(wp: WithProps): VdomNode = wp.apply()
}
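The toVdomNode conversion means a WithProps value can be dropped straight into a VDOM tree; the compiler inserts the zero-argument apply() call. Sketch (the bridged component is stubbed, since constructing one needs a concrete JS component):

import com.payalabs.scalajs.react.bridge.WithProps
import japgolly.scalajs.react.vdom.html_<^._

val boundComponent: WithProps = ??? // a component with pre-bound props, in practice

<.div(
  boundComponent // implicitly expanded to boundComponent() via WithProps.toVdomNode
)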
Example 31
Source File: ColumnDefinition.scala From sql-differential-privacy with MIT License | 5 votes |
package com.uber.engsec.dp.rewriting.rules import com.uber.engsec.dp.dataflow.column.AbstractColumnAnalysis.ColumnFacts import com.uber.engsec.dp.dataflow.column.NodeColumnFacts import com.uber.engsec.dp.rewriting.rules.Expr.ColumnReferenceByName import com.uber.engsec.dp.sql.relational_algebra.{Relation, Transformer} import org.apache.calcite.rel.logical.{LogicalProject, LogicalValues} import org.apache.calcite.tools.Frameworks class ColumnDefinition[+T <: Expr](val expr: T) case class ColumnDefinitionWithAlias[+T <: Expr](override val expr: T, alias: String) extends ColumnDefinition[T](expr) case class ColumnDefinitionWithOrdinal[+T <: Expr](override val expr: T, alias: String, idx: Int) extends ColumnDefinition[T](expr) object ColumnDefinition { import scala.collection.JavaConverters._ import scala.language.implicitConversions // Automatically cast to column if alias is attached to an expression implicit class ExprColumnAlias[T <: Expr](expr: T) { def AS(alias: String): ColumnDefinitionWithAlias[T] = ColumnDefinitionWithAlias[T](expr, alias) def AS(alias: ColumnReferenceByName): ColumnDefinitionWithAlias[T] = ColumnDefinitionWithAlias[T](expr, alias.name) } // Allow renaming of a column (keeping the same expression) implicit class ColumnAlias[T <: Expr](col: ColumnDefinition[T]) { def AS(alias: String): ColumnDefinitionWithAlias[T] = ColumnDefinitionWithAlias[T](col.expr, alias) } // Allow easy lookup of the column fact from an analysis result implicit class ColumnFactLookup[F](results: ColumnFacts[F]) { def apply[T <: Expr](col: ColumnDefinitionWithOrdinal[T]): F = results(col.idx) } implicit class NodeColumnFactLookup[F](results: NodeColumnFacts[_,F]) { def apply[T <: Expr](col: ColumnDefinitionWithOrdinal[T]): F = results.colFacts(col.idx) } // Creates a relation from a list of column definitions def rel(cols: ColumnDefinition[Expr]*): Relation = columnDefsToRelation(cols) implicit def columnDefsToRelation(cols: Seq[ColumnDefinition[Expr]]): Relation = { val cluster = new Transformer( Frameworks.newConfigBuilder .defaultSchema(Frameworks.createRootSchema(true)) .build ).cluster val inputRel = LogicalValues.createOneRow(cluster) val projections = cols.map{ _.expr.toRex(Relation(inputRel)) } val rowType = Helpers.getRecordType( cols.zip(projections) ) val result = LogicalProject.create(inputRel, projections.asJava, rowType) Relation(result) } implicit def columnReferenceToColumnDefinitionWithName(col: ColumnReferenceByName): ColumnDefinitionWithAlias[ColumnReferenceByName] = ColumnDefinitionWithAlias[ColumnReferenceByName](col, col.name) implicit def columnDefinitionWithAliasToColumnReferenceByName[T <: Expr](col: ColumnDefinitionWithAlias[T]): ColumnReferenceByName = Expr.col(col.alias) implicit def exprToColumnDefinition[T <: Expr](expr: T): ColumnDefinition[T] = new ColumnDefinition(expr) }
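The AS combinators above are plain implicit (value) classes. A self-contained sketch of the same aliasing pattern, with Col and the String receiver as illustrative stand-ins for the library's Expr types:

object AliasDemo extends App {
  final case class Col(expr: String, alias: Option[String] = None)

  // Adds an infix AS method to any expression-like string.
  implicit class ExprAlias(private val expr: String) extends AnyVal {
    def AS(alias: String): Col = Col(expr, Some(alias))
  }

  println("price * quantity" AS "total") // Col(price * quantity,Some(total))
}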
Example 32
Source File: AbstractColumnAnalysis.scala From sql-differential-privacy with MIT License | 5 votes |
package com.uber.engsec.dp.dataflow.column import com.uber.engsec.dp.dataflow.AbstractDataflowAnalysis import com.uber.engsec.dp.dataflow.column.AbstractColumnAnalysis.ColumnFacts import com.uber.engsec.dp.dataflow.domain.AbstractDomain /* The class header was dropped by this excerpt; reconstructed so the abstract members below have an owner: */ abstract class AbstractColumnAnalysis[N, E] extends AbstractDataflowAnalysis[N, ColumnFacts[E]] { override def transferNode(node: N, state: ColumnFacts[E]): ColumnFacts[E] override def joinNode(node: N, children: Iterable[N]): ColumnFacts[E] } object AbstractColumnAnalysis { import scala.language.implicitConversions type ColumnFacts[+J] = IndexedSeq[J] implicit def elemListToColumnFacts[J](elems: List[J]): ColumnFacts[J] = elems.toIndexedSeq implicit def elemsToColumnFacts[J](elems: J*): ColumnFacts[J] = elems.toIndexedSeq implicit def elemToColumnFacts[J](elem: J): ColumnFacts[J] = IndexedSeq(elem) def joinFacts[E](domain: AbstractDomain[E], facts: Iterable[E]): E = { val resultFact: E = if (facts.isEmpty) domain.bottom else if (facts.size == 1) facts.head else facts.reduce( (first, second) => domain.leastUpperBound(first, second) ) resultFact } }
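A self-contained sketch of the fact-lifting conversions above: a lone element (or a List, or varargs) can stand in wherever an IndexedSeq of column facts is expected:

import scala.language.implicitConversions

object FactsDemo extends App {
  type ColumnFacts[+J] = IndexedSeq[J]
  implicit def elemToColumnFacts[J](elem: J): ColumnFacts[J] = IndexedSeq(elem)

  def width(facts: ColumnFacts[Boolean]): Int = facts.size
  println(width(true)) // 1 -- the single fact is lifted to IndexedSeq(true)
}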
Example 33
Source File: Attribute.scala From zio-telemetry with Apache License 2.0 | 5 votes |
package zio.telemetry.opencensus import io.opencensus.trace.AttributeValue object Attributes { trait implicits { import scala.language.implicitConversions implicit def boolToAttribute(b: Boolean): AttributeValue = AttributeValue.booleanAttributeValue(b) implicit def stringToAttribute(s: String): AttributeValue = AttributeValue.stringAttributeValue(s) implicit def longToAttribute(l: Long): AttributeValue = AttributeValue.longAttributeValue(l) implicit def doubleToAttribute(d: Double): AttributeValue = AttributeValue.doubleAttributeValue(d) } }
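A hedged usage sketch, assuming opencensus-api on the classpath: mixing the implicits trait into scope lets plain Scala values be used where an AttributeValue is expected.

import io.opencensus.trace.AttributeValue
import zio.telemetry.opencensus.Attributes

object AttrDemo extends Attributes.implicits {
  val retries: AttributeValue = 3L    // lifted via longToAttribute
  val cached: AttributeValue = true   // lifted via boolToAttribute
}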
Example 34
Source File: FregeServlet.scala From sbt-frege with BSD 3-Clause "New" or "Revised" License | 5 votes |
package fregeweb import scala.language.implicitConversions import javax.servlet.http.HttpServlet import javax.servlet.http.{ HttpServletRequest => HSReq } import javax.servlet.http.{ HttpServletResponse => HSRes } import FregeWeb.TRequest import FregeWeb.TResponse import frege.run8.Box class FregeServlet extends HttpServlet { override def service(hsReq: HSReq, hsRes: HSRes): Unit = hsRes service hsReq } object `package` { implicit class HSResService(val hsRes: HSRes) extends AnyVal { def service(hsReq: HSReq): Unit = { val tReq: TRequest = TRequest.mk( new Box(hsReq.method) , new Box(hsReq.uri) ) val tRes: TResponse = FregeWeb.service(tReq).asInstanceOf[TResponse] write(tRes) } private def write(tRes: TResponse): Unit = { val status: Int = TResponse.status(tRes).asInstanceOf[Int] val body: String = TResponse.body(tRes).asInstanceOf[String] hsRes.setStatus(status) hsRes.getWriter().write(body) } } implicit class RichHSReq(val hsReq: HSReq) extends AnyVal { def method: String = hsReq.getMethod() def uri: String = if (hsReq.getRequestURI().startsWith(hsReq.getServletPath())) hsReq.getRequestURI().substring(hsReq.getServletPath().length()) else hsReq.getRequestURI() } }
Example 35
Source File: LambdaImplicits.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move.implicits import java.util.function._ import scala.language.implicitConversions import javafx.util.Callback object LambdaImplicits { implicit def function[A,B](f: A => B): Function[A,B] = new Function[A,B] { override def apply(a:A): B = f(a) } implicit def supplier[A](f: => A): Supplier[A] = new Supplier[A] { override def get(): A = f } implicit def consumer[A](f: A => Unit): Consumer[A] = new Consumer[A] { override def accept(a:A): Unit = f(a) } implicit def consumer[A](f: => Unit): Consumer[A] = new Consumer[A] { override def accept(a:A): Unit = f } implicit def callback[A, B](f: A => B):Callback[A,B] = new Callback[A,B] { override def call(param: A): B = f(param) } }
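A hedged usage sketch of the SAM-bridging conversions above; it assumes the MoVE artifact on the classpath, and JavaFX is not needed for these two. The Supplier line is the one a plain SAM conversion cannot express, since 42 is a value rather than a lambda:

import java.util.function.{Consumer, Supplier}
import de.thm.move.implicits.LambdaImplicits._

object LambdaDemo extends App {
  val answer: Supplier[Int] = 42 // a by-name value lifted into a Supplier
  val printer: Consumer[String] = (s: String) => println(s)
  printer.accept(s"the answer is ${answer.get()}")
}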
Example 36
Source File: FxHandlerImplicits.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move.implicits import java.util.function.{Predicate, Function => JavaFunction} import javafx.beans.value.{ChangeListener, ObservableValue} import javafx.event.{ActionEvent, EventHandler} import javafx.scene.input.{KeyEvent, MouseEvent} import scala.language.implicitConversions object FxHandlerImplicits { implicit def mouseEventHandler[T >: MouseEvent, Z](fn: T => Z): EventHandler[MouseEvent] = new EventHandler[MouseEvent]() { override def handle(event: MouseEvent): Unit = fn(event) } implicit def keyEventHandler[T >: KeyEvent, Z](fn: T => Z): EventHandler[KeyEvent] = new EventHandler[KeyEvent]() { override def handle(ke:KeyEvent): Unit = fn(ke) } implicit def actionEventHandler[T >: ActionEvent, Z](fn: T => Z): EventHandler[ActionEvent] = new EventHandler[ActionEvent]() { override def handle(event: ActionEvent): Unit = fn(event) } implicit def changeListener[A, Z](fn: (A, A) => Z):ChangeListener[A] = new ChangeListener[A] { override def changed(observable: ObservableValue[_ <: A], oldValue: A, newValue: A): Unit = fn(oldValue, newValue) } implicit def changeListener[A, Z](fn: (ObservableValue[_<:A], A, A) => Z):ChangeListener[A] = new ChangeListener[A] { override def changed(observable: ObservableValue[_<: A], oldValue: A, newValue: A): Unit = fn(observable, oldValue, newValue) } implicit def eventHandler[E <: javafx.event.Event, Z](fn: E => Z):EventHandler[E] = new EventHandler[E] { override def handle(event: E): Unit = fn(event) } implicit def predicate[A](fn: A => Boolean):Predicate[A] = new Predicate[A]() { override def test(a:A): Boolean = fn(a) } implicit def function[A, B](fn: A => B): JavaFunction[A,B] = new JavaFunction[A,B]() { override def apply(v: A): B = fn(v) } }
Example 37
Source File: Pimpers.scala From sparkplug with MIT License | 5 votes |
package springnz.sparkplug.util import com.typesafe.scalalogging.Logger import scala.concurrent.{ ExecutionContext, Future } import scala.language.implicitConversions import scala.util.{ Failure, Try } private[sparkplug] object Pimpers { implicit class TryPimper[A](t: Try[A]) { def withErrorLog(msg: String)(implicit log: Logger): Try[A] = t.recoverWith { case e ⇒ log.error(msg, e) Failure(e) } def withFinally[T](block: ⇒ T): Try[A] = { block t } } implicit class FuturePimper[T](f: Future[T]) { def withErrorLog(msg: String)(implicit log: Logger, ec: ExecutionContext): Future[T] = { f.onFailure { case e ⇒ log.error(msg, e) } f } } implicit def map2Properties(map: Map[String, String]): java.util.Properties = { (new java.util.Properties /: map) { case (props, (k, v)) ⇒ props.put(k, v); props } } }
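The Map-to-Properties conversion above is handy wherever a Java API insists on java.util.Properties. A self-contained sketch of the same idea, with the deprecated /: fold spelled as foldLeft:

import scala.language.implicitConversions

object PropsDemo extends App {
  implicit def map2Properties(map: Map[String, String]): java.util.Properties =
    map.foldLeft(new java.util.Properties) { case (props, (k, v)) => props.put(k, v); props }

  def connect(props: java.util.Properties): Unit = println(props)
  connect(Map("user" -> "demo", "ssl" -> "true")) // the Map is converted implicitly
}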
Example 38
Source File: VwLabelRowCreatorTest.scala From aloha with MIT License | 5 votes |
package com.eharmony.aloha.dataset.vw.labeled import com.eharmony.aloha.dataset.SparseFeatureExtractorFunction import com.eharmony.aloha.semantics.func.GenFunc.f0 import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.BlockJUnit4ClassRunner import scala.language.{postfixOps, implicitConversions} @RunWith(classOf[BlockJUnit4ClassRunner]) final class VwLabelRowCreatorTest { private[this] val lab = 3d private[this] val imp0 = 0d private[this] val imp1 = 1d private[this] val imp2 = 2d private[this] val emptyTag = "" private[this] val tag = "t" private[this] implicit def liftToOption[A](a: A): Option[A] = Option(a) private[this] def spec(lab: Option[Double] = None, imp: Option[Double] = None, tag: Option[String] = None): VwLabelRowCreator[Any] = { val fef = new SparseFeatureExtractorFunction[Any](Vector("f1" -> f0("Empty", _ => Nil))) VwLabelRowCreator(fef, 0 to 0 toList, Nil, None, f0("", _ => lab), f0("", _ => imp), f0("", _ => tag)) } private[this] def testLabelRemoval(spec: VwLabelRowCreator[Any], exp: String = ""): Unit = assertEquals(exp, spec(())._2.toString) // All of these should return empty label because the Label function returns a missing label. @Test def testS___() = testLabelRemoval(spec()) @Test def testS__e() = testLabelRemoval(spec(tag = emptyTag)) @Test def testS__t() = testLabelRemoval(spec(tag = tag)) @Test def testS_0_() = testLabelRemoval(spec(imp = imp0)) @Test def testS_0e() = testLabelRemoval(spec(imp = imp0, tag = emptyTag)) @Test def testS_0t() = testLabelRemoval(spec(imp = imp0, tag = tag)) @Test def testS_1_() = testLabelRemoval(spec(imp = imp1)) @Test def testS_1e() = testLabelRemoval(spec(imp = imp1, tag = emptyTag)) @Test def testS_1t() = testLabelRemoval(spec(imp = imp1, tag = tag)) @Test def testS_2_() = testLabelRemoval(spec(imp = imp2)) @Test def testS_2e() = testLabelRemoval(spec(imp = imp2, tag = emptyTag)) @Test def testS_2t() = testLabelRemoval(spec(imp = imp2, tag = tag)) // Importance not provided makes entire label vanish @Test def testS1_e() = testLabelRemoval(spec(lab = lab, tag = emptyTag)) @Test def testS1_t() = testLabelRemoval(spec(lab = lab, tag = tag)) // Importance of zero is given explicitly. @Test def testS10_() = testLabelRemoval(spec(lab = lab, imp = imp0), "3 0 |") @Test def testS10e() = testLabelRemoval(spec(lab = lab, imp = imp0, tag = emptyTag), "3 0 |") @Test def testS10t() = testLabelRemoval(spec(lab = lab, imp = imp0, tag = tag), "3 0 t|") // Importance of 1 is omitted. 
@Test def testS11_() = testLabelRemoval(spec(lab = lab, imp = imp1), "3 |") @Test def testS11e() = testLabelRemoval(spec(lab = lab, imp = imp1, tag = emptyTag), "3 |") @Test def testS11t() = testLabelRemoval(spec(lab = lab, imp = imp1, tag = tag), "3 t|") @Test def testS12_() = testLabelRemoval(spec(lab = lab, imp = imp2), "3 2 |") @Test def testS12e() = testLabelRemoval(spec(lab = lab, imp = imp2, tag = emptyTag), "3 2 |") @Test def testS12t() = testLabelRemoval(spec(lab = lab, imp = imp2, tag = tag), "3 2 t|") @Test def testStringLabel() { val spec = new VwLabelRowCreator( new SparseFeatureExtractorFunction(Vector("f1" -> f0("Empty", (_: Double) => Nil))), 0 to 0 toList, Nil, None, f0("", (s: Double) => Option(s)), // Label f0("", (_: Double) => Option(1d)), // Importance f0("", (_: Double) => None)) // Tag val values = Seq( -1.0 -> "-1", -0.99999999999999999 -> "-1", -0.9999999999999999 -> "-0.9999999999999999", -1.0E-16 -> "-0.0000000000000001", -1.0E-17 -> "-0.00000000000000001", -1.0E-18 -> "-0", 0.0 -> "0", 1.0E-18 -> "0", 1.0E-17 -> "0.00000000000000001", 1.0E-16 -> "0.0000000000000001", 0.9999999999999999 -> "0.9999999999999999", 0.99999999999999999 -> "1", 1.0 -> "1" ) values foreach { case(v, ex) => assertEquals(s"for line: $v", Option(ex), spec.stringLabel(v)) } } }
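The private liftToOption conversion above is what lets the tests write spec(lab = lab, tag = tag) instead of wrapping every argument in Some. A self-contained sketch; note the test keeps the conversion private[this], since an unrestricted A => Option[A] in wide scope is easy to trip over:

import scala.language.implicitConversions

object LiftDemo extends App {
  implicit def liftToOption[A](a: A): Option[A] = Option(a)

  def spec(lab: Option[Double] = None, tag: Option[String] = None): String =
    s"lab=$lab tag=$tag"

  println(spec(lab = 3d, tag = "t")) // lab=Some(3.0) tag=Some(t)
}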
Example 39
Source File: CompiledSemanticsTest.scala From aloha with MIT License | 5 votes |
package com.eharmony.aloha.semantics.compiled import java.{lang => jl} import com.eharmony.aloha.FileLocations import com.eharmony.aloha.reflect.RefInfo import com.eharmony.aloha.semantics.compiled.compiler.TwitterEvalCompiler import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.BlockJUnit4ClassRunner import scala.concurrent.ExecutionContext.Implicits.global import scala.language.implicitConversions @RunWith(classOf[BlockJUnit4ClassRunner]) class CompiledSemanticsTest { private[this] val compiler = TwitterEvalCompiler(classCacheDir = Option(FileLocations.testGeneratedClasses)) @Test def test0() { val s = CompiledSemantics(compiler, MapStringLongPlugin, Seq()) val f = s.createFunction[Int]("List(${five:-5L}).sum.toInt").right.get val x1 = Map("five" -> 1L) val x2 = Map.empty[String, Long] assertEquals(1, f(x1)) assertEquals(5, f(x2)) } @Test def test1() { val s = CompiledSemantics(compiler, MapStringLongPlugin, Seq()) val f = s.createFunction[Int]("List(${one}, ${two}, ${three}).sum.toInt", Option(Int.MinValue)).right.get val x1 = Map[String, Long]("one" -> 2, "two" -> 4, "three" -> 6) val y1 = f(x1) assertEquals(12, y1) } @Test def test2() { val s = CompiledSemantics(compiler, MapStringLongPlugin, Seq()) val f = s.createFunction[Double]("${user.inboundComm} / ${user.pageViews}.toDouble", Some(Double.NaN)).right.get val x1 = Map[String, Long]("user.inboundComm" -> 5, "user.pageViews" -> 10) val x2 = Map[String, Long]("user.inboundComm" -> 5) val y1 = f(x1) val y2 = f(x2) assertEquals(0.5, y1, 1.0e-6) assertEquals(Double.NaN, y2, 0) } @Test def test3() { val s = CompiledSemantics(compiler, MapStringLongPlugin, Seq()) val f = s.createFunction[Long]("new util.Random(0).nextLong").right.get val y1 = f(null) assertEquals(-4962768465676381896L, y1) } @Test def testNullDefaultOnExistingValue() { val s = CompiledSemantics(compiler, MapStringLongPlugin, Seq("com.eharmony.aloha.semantics.compiled.StaticFuncs._")) val f = s.createFunction[Long]("f(${one})").left.map(_.foreach(println)).right.get val y1 = f(Map("one" -> 1)) assertEquals(18, y1) } @Test def testNullDefaultOnNonMissingPrimitiveValue() { val s = CompiledSemantics(compiler, MapStringLongPlugin, Seq("com.eharmony.aloha.semantics.compiled.StaticFuncs._")) var errors: Seq[String] = Nil val f = s.createFunction[Long]("f(${missing:-null}.asInstanceOf[java.lang.Long])"). left.map(e => errors = e). right.get val y1 = f(Map("missing" -> 13)) assertEquals("Should process correctly when defaulting to null", 18, y1) assertEquals("No errors should appear", 0, errors.size) } private[this] object MapStringLongPlugin extends CompiledSemanticsPlugin[Map[String, Long]] { def refInfoA = RefInfo[Map[String, Long]] def accessorFunctionCode(spec: String) = { val required = Seq("user.inboundComm", "one", "two", "three") spec match { case s if required contains s => Right(RequiredAccessorCode(Seq("(_:Map[String, Long]).apply(\"" + spec + "\")"))) case _ => Right(OptionalAccessorCode(Seq("(_:Map[String, Long]).get(\"" + spec + "\")"))) } } } } object StaticFuncs { def f(a: jl.Long): Long = if (null == a) 13 else 18 implicit def doubletoJlDouble(d: Double): java.lang.Double = java.lang.Double.valueOf(d) }
Example 40
Source File: MLPairRDDFunctions.scala From sona with Apache License 2.0 | 5 votes |
package com.tencent.angel.sona.ml.rdd import scala.language.implicitConversions import scala.reflect.ClassTag import org.apache.spark.annotation.DeveloperApi import org.apache.spark.rdd.RDD import org.apache.spark.util.BoundedPriorityQueue /** * :: DeveloperApi :: * Machine learning specific Pair RDD functions. */ @DeveloperApi class MLPairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)]) extends Serializable { /** * Returns the top k (largest) elements for each key from this RDD as defined by the specified * implicit Ordering[T]. * If the number of elements for a certain key is less than k, all of them will be returned. * * @param num k, the number of top elements to return * @param ord the implicit ordering for T * @return an RDD that contains the top k values for each key */ def topByKey(num: Int)(implicit ord: Ordering[V]): RDD[(K, Array[V])] = { self.aggregateByKey(new BoundedPriorityQueue[V](num)(ord))( seqOp = (queue, item) => { queue += item }, combOp = (queue1, queue2) => { queue1 ++= queue2 } ).mapValues(_.toArray.sorted(ord.reverse)) // This is a min-heap, so we reverse the order. } } /** * :: DeveloperApi :: */ @DeveloperApi object MLPairRDDFunctions { implicit def fromPairRDD[K: ClassTag, V: ClassTag](rdd: RDD[(K, V)]): MLPairRDDFunctions[K, V] = new MLPairRDDFunctions[K, V](rdd) }
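A hedged usage sketch; it assumes a live SparkContext is passed in. The implicit fromPairRDD is what makes topByKey appear on an ordinary pair RDD:

import org.apache.spark.SparkContext
import com.tencent.angel.sona.ml.rdd.MLPairRDDFunctions._

object TopByKeyDemo {
  def demo(sc: SparkContext): Array[(String, Array[Int])] = {
    val scores = sc.parallelize(Seq(("a", 1), ("a", 3), ("a", 2), ("b", 5)))
    scores.topByKey(2).collect() // Array((a,Array(3, 2)), (b,Array(5))) -- largest first per key
  }
}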
Example 41
Source File: JdbcUtil.scala From sundial with MIT License | 5 votes |
package util import java.sql.{Connection, Timestamp, ResultSet} import java.util.Date import scala.language.implicitConversions object JdbcUtil { implicit def resultSetItr(resultSet: ResultSet): Stream[ResultSet] = { new Iterator[ResultSet] { def hasNext = resultSet.next() def next() = resultSet }.toStream } implicit def javaDate(ts: Timestamp): Date = { new Date(ts.getTime()) } implicit def dateToTimestamp(date: Date) = { if (date != null) new Timestamp(date.getTime()) else null } private def getNullable[T](rs: ResultSet, f: ResultSet => T): Option[T] = { val obj = f(rs) if (rs.wasNull()) { Option.empty } else { Some(obj) } } def getIntOption(rs: ResultSet, col: String) = getNullable(rs, rs => rs.getInt(col)) def makeStringArray(seq: Seq[String])(implicit conn: Connection) = { conn.createArrayOf("varchar", seq.toArray[AnyRef]) } def getStringArray(rs: ResultSet, col: String) = { Option(rs.getArray(col)) .map(_.getArray().asInstanceOf[Array[String]].toList) } }
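A hedged usage sketch of the ResultSet-to-Stream conversion above; it assumes a live java.sql.Connection is passed in. Because the Stream wraps the live cursor, consume it fully (here via toList) before the statement or connection is closed:

import java.sql.Connection
import util.JdbcUtil._

object StreamDemo {
  def ids(conn: Connection): List[Long] = {
    val rs = conn.createStatement().executeQuery("SELECT id FROM ids")
    rs.map(_.getLong("id")).toList // the ResultSet is converted to a Stream
  }
}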
Example 42
Source File: QueryCsvTest.scala From apache-spark-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.spark.sstreaming import com.github.dnvriend.TestSpec import org.apache.commons.io.FileUtils import org.apache.spark.sql.streaming.{ OutputMode, ProcessingTime } import org.apache.spark.sql.types._ import org.scalatest.Ignore import scala.concurrent.duration._ import scala.language.implicitConversions @Ignore class QueryCsvTest extends TestSpec { def copyFiles(nrTimes: Int = 10): Unit = { FileUtils.deleteDirectory("/tmp/csv") FileUtils.forceMkdir("/tmp/csv") (1 to nrTimes).foreach { x => FileUtils.copyFile(TestSpec.PeopleCsv, s"/tmp/csv/people-$x") } } val schema: StructType = StructType(Array( StructField("id", LongType, nullable = false), StructField("name", StringType, nullable = true), StructField("age", IntegerType, nullable = true) )) it should "query csv file" in withSparkSession { spark => copyFiles() val csv = spark.readStream .schema(schema) .format("csv") .option("maxFilesPerTrigger", 1) .option("header", "false") // Use first line of all files as header .option("inferSchema", "false") // Automatically infer data types .option("delimiter", ";") .load("/tmp/csv") csv.printSchema() println("Is the query streaming: " + csv.isStreaming) println("Are there any streaming queries? " + spark.streams.active.isEmpty) val query = csv .writeStream .format("console") .trigger(ProcessingTime(5.seconds)) .queryName("consoleStream") .outputMode(OutputMode.Append()) .start() // waiting for data sleep(3.seconds) spark.streams .active .foreach(println) spark.streams .active .foreach(_.explain(extended = true)) query.awaitTermination(20.seconds) } }
Example 43
Source File: CurrentPersistenceIdsQuerySourceTest.scala From apache-spark-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.spark.sstreaming import java.util.UUID import java.util.concurrent.atomic.AtomicLong import akka.actor.{ ActorRef, Props } import akka.persistence.PersistentActor import akka.testkit.TestProbe import com.github.dnvriend.TestSpec import com.github.dnvriend.spark.datasources.SparkImplicits._ import com.github.dnvriend.spark.datasources.person.Person import org.apache.spark.sql.streaming.{ OutputMode, ProcessingTime } import org.scalatest.Ignore import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import scala.language.implicitConversions object PersonActor { final case class BlogPost(id: Long, text: String) } class PersonActor(val persistenceId: String, schedule: Boolean)(implicit ec: ExecutionContext) extends PersistentActor { val counter = new AtomicLong() def ping() = context.system.scheduler.scheduleOnce(200.millis, self, "persist") def randomId: String = UUID.randomUUID.toString override val receiveRecover: Receive = PartialFunction.empty override val receiveCommand: Receive = { case "persist" => persist(Person(counter.incrementAndGet(), s"foo-$randomId", 20)) { _ => sender() ! "ack" } if (schedule) ping() } if (schedule) ping() } @Ignore class CurrentPersistenceIdsQuerySourceTest extends TestSpec { def withPersistentActor(pid: String = randomId, schedule: Boolean = false)(f: ActorRef => TestProbe => Unit): Unit = { val tp = TestProbe() val ref = system.actorOf(Props(new PersonActor(pid, schedule))) try f(ref)(tp) finally killActors(ref) } it should "query read journal" in withSparkSession { spark => withPersistentActor() { ref => tp => tp.send(ref, "persist") tp.expectMsg("ack") val jdbcReadJournal = spark.readStream .currentPersistenceIds("jdbc-read-journal") jdbcReadJournal.printSchema() println("Is the query streaming: " + jdbcReadJournal.isStreaming) println("Are there any streaming queries? " + spark.streams.active.isEmpty) val query = jdbcReadJournal .writeStream .format("console") .trigger(ProcessingTime(1.seconds)) .queryName("consoleStream") .outputMode(OutputMode.Append()) .start() query.awaitTermination(10.seconds) } } }
Example 44
Source File: DomainModelSyntax.scala From http4s-poc-api with MIT License | 5 votes |
package model package syntax import model.DomainModel._ import model.syntax.DomainModelSyntax.{BigDecimalOps, LongOps, StringOps, domainModelTaggedOps} import shapeless.tag import shapeless.tag.@@ import scala.language.implicitConversions private[syntax] trait DomainModelSyntax { implicit def domainModelLongSyntax(x: Long) = new LongOps(x) implicit def domainModelStringSyntax(x: String) = new StringOps(x) implicit def domainModelBigDecimalOpsSyntax(x: BigDecimal) = new BigDecimalOps(x) implicit def domainModelTaggedSyntax[A](a: A) = new domainModelTaggedOps[A](a) } private[syntax] object DomainModelSyntax { import syntax.domainModel._ final class LongOps(private val x: Long) extends AnyVal { def asUserId: UserId = x.refined[UserIdT] def asProductId: ProductId = x.refined[ProductIdT] } final class StringOps(private val x: String) extends AnyVal { def asCountry: Country = x.refined[CountryT] def asUserAddress: UserAddress = x.refined[UserAddressT] def asCurrency: Currency = x.refined[CurrencyT] def asProductSpec: ProductSpec = x.refined[ProductSpecT] } final class BigDecimalOps(private val x: BigDecimal) extends AnyVal { def asMoneyAmount: MoneyAmount = x.refined[MoneyAmountT] } final class domainModelTaggedOps[A](private val a: A) extends AnyVal { def refined[T]: A @@ T = tag[T](a) } }
Example 45
Source File: ResponseSyntax.scala From http4s-poc-api with MIT License | 5 votes |
package external package library package syntax import cats.effect.Sync import cats.syntax.applicativeError._ import org.http4s.Response import scala.language.implicitConversions import scala.reflect.ClassTag private[syntax] trait ResponseSyntax { implicit def responseSyntax[F[_]](r: F[Response[F]]): ResponseOps[F] = new ResponseOps(r) } private[syntax] class ResponseOps[F[_]](private val r: F[Response[F]]) extends AnyVal { def handlingFailures[E <: Throwable: ClassTag](hf: E => F[Response[F]])( implicit ev: Sync[F] ): F[Response[F]] = r recoverWith { case e: E => hf(e) } }
Example 46
Source File: IoAdaptSyntax.scala From http4s-poc-api with MIT License | 5 votes |
package external package library package syntax import cats.MonadError import external.library.IoAdapt.--> import scala.language.implicitConversions private[syntax] trait IoAdaptSyntax { implicit def ioAdaptSyntax[F[_], A](fa: =>F[A]): IoAdaptOps[F, A] = new IoAdaptOps(fa) implicit def ioAdaptEitherSyntax[F[_], A, E](fa: =>F[Either[E, A]]): IoAdaptEitherOps[F, A, E] = new IoAdaptEitherOps(fa) } private[syntax] final class IoAdaptOps[F[_], A](fa: =>F[A]) { def adaptedTo[G[_]](implicit nt: F --> G): G[A] = nt.apply(fa) } private[syntax] class IoAdaptEitherOps[F[_], A, E](private val fa: F[Either[E, A]]) extends AnyVal { def liftIntoMonadError[G[_]](implicit nt: F --> G, err: MonadError[G, E]): G[A] = (err.rethrow[A, E] _ compose nt.apply)(fa) }
Example 47
Source File: ErrorAdaptSyntax.scala From http4s-poc-api with MIT License | 5 votes |
package external package library package syntax import cats.MonadError import cats.syntax.monadError._ import scala.language.implicitConversions private[syntax] trait ErrorAdaptSyntax { implicit def errorAdaptSyntax[F[_], A](anFa: =>F[A]): ErrorAdaptOps[F, A] = new ErrorAdaptOps(anFa) } private[syntax] class ErrorAdaptOps[F[_], A](private val anFa: F[A]) extends AnyVal { def narrowFailureWith[E <: Throwable](ef: Throwable => E)(implicit ev: MonadError[F, Throwable]): F[A] = anFa adaptError { case th: Throwable => ef(th) } def narrowFailureTo[E <: Throwable](implicit ev: MonadError[F, Throwable], ef: ThrowableMap[E]): F[A] = anFa adaptError { case th: Throwable => ef map th } }
Example 48
Source File: ResponseVerificationSyntax.scala From http4s-poc-api with MIT License | 5 votes |
package syntax import java.nio.charset.StandardCharsets import cats.data.Validated import cats.instances.string._ import cats.syntax.eq._ import cats.syntax.show._ import cats.syntax.validated._ import cats.{Eq, Show} import org.http4s.{EntityDecoder, Response, Status} import typeclasses.RunSync import zio.Task import zio.interop.catz._ import scala.language.implicitConversions private[syntax] trait ResponseVerificationSyntax { implicit def verifiedSyntax[A](a: A): VerifiedOps[A] = new VerifiedOps(a) implicit def verifiedOptionSyntax[A](a: Option[A]): VerifiedOptionOps[A] = new VerifiedOptionOps(a) implicit def responseVerificationSyntax(response: Task[Response[Task]]) = new IoResponseResultOps(response) } private[syntax] class IoResponseResultOps(private val response: Task[Response[Task]]) extends AnyVal { import syntax.responseVerification._ def verify[A: EntityDecoder[Task, *]](status: Status, check: A => Verified[A])( implicit ev1: Eq[Status], ev2: Show[Status], run: RunSync[Task] ): Verified[A] = run .syncUnsafe(response) .fold( err => s"Should succeed but returned the error $err".invalidNel, res => res.status isSameAs status andThen { _ => verifiedResponse[A](res, check) } ) def verifyResponseText(status: Status, expected: String)( implicit ev1: Eq[Status], ev2: Show[Status], run: RunSync[Task] ): Verified[String] = run .syncUnsafe(response) .fold( err => s"Should succeed but returned the error $err".invalidNel, res => res.status isSameAs status andThen { _ => verifiedResponseText(res, expected) } ) private def verifiedResponse[A: EntityDecoder[Task, *]](res: Response[Task], check: A => Verified[A])( implicit run: RunSync[Task] ): Verified[A] = run .syncUnsafe(res.as[A]) .fold( respErr => s"Response should succeed but returned the error $respErr".invalidNel, respRes => check(respRes) ) private def verifiedResponseText[A](res: Response[Task], expected: String)( implicit run: RunSync[Task] ): Verified[String] = run .syncUnsafe(res.body.compile.toVector) .map(_.toArray) .fold( respErr => s"Response should succeed but returned the error $respErr".invalidNel, respMsg => new String(respMsg, StandardCharsets.UTF_8) isSameAs expected ) } private[syntax] class VerifiedOps[A](private val a: A) extends AnyVal { def isNotSameAs(expected: =>A)(implicit ev1: Eq[A], ev2: Show[A]): Verified[A] = Validated.condNel( a =!= expected, a, s"Unexpected value. Expected different from ${expected.show} but was ${a.show}" ) def isSameAs(expected: =>A)(implicit ev1: Eq[A], ev2: Show[A]): Verified[A] = Validated.condNel(a === expected, a, s"Unexpected value. Expected ${expected.show} but was ${a.show}") def is(p: A => Boolean, reason: =>String = "")(implicit ev: Show[A]): Verified[A] = Validated.condNel(p(a), a, s"Unexpected value ${a.show}: Reason $reason") } private[syntax] class VerifiedOptionOps[A](private val a: Option[A]) extends AnyVal { def isNotEmpty: Verified[Option[A]] = Validated.condNel(a.isDefined, a, s"Unexpected empty option value") }
Example 49
Source File: VertexPartition.scala From zen with Apache License 2.0 | 5 votes |
package org.apache.spark.graphx2.impl import scala.reflect.ClassTag import org.apache.spark.util.collection.BitSet import org.apache.spark.graphx2._ import org.apache.spark.graphx2.util.collection.GraphXPrimitiveKeyOpenHashMap object VertexPartition { class VertexPartition[VD: ClassTag]( val index: VertexIdToIndexMap, val values: Array[VD], val mask: BitSet) extends VertexPartitionBase[VD] class VertexPartitionOps[VD: ClassTag](self: VertexPartition[VD]) extends VertexPartitionBaseOps[VD, VertexPartition](self) { def withIndex(index: VertexIdToIndexMap): VertexPartition[VD] = { new VertexPartition(index, self.values, self.mask) } def withValues[VD2: ClassTag](values: Array[VD2]): VertexPartition[VD2] = { new VertexPartition(self.index, values, self.mask) } def withMask(mask: BitSet): VertexPartition[VD] = { new VertexPartition(self.index, self.values, mask) } } /* the object's implicit conversions to VertexPartitionOps are elided in this excerpt */ }
Example 50
Source File: SparkUtils.scala From zen with Apache License 2.0 | 5 votes |
package com.github.cloudml.zen.ml.util import breeze.linalg.{Vector => BV, SparseVector => BSV, DenseVector => BDV} import breeze.storage.Zero import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.spark.SparkConf import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.mllib.linalg.{DenseVector => SDV, Vector => SV, SparseVector => SSV} import scala.language.implicitConversions import scala.reflect.ClassTag private[zen] object SparkUtils { implicit def toBreeze(sv: SV): BV[Double] = { sv match { case SDV(data) => new BDV(data) case SSV(size, indices, values) => new BSV(indices, values, size) } } implicit def fromBreeze(breezeVector: BV[Double]): SV = { breezeVector match { case v: BDV[Double] => if (v.offset == 0 && v.stride == 1 && v.length == v.data.length) { new SDV(v.data) } else { new SDV(v.toArray) // Can't use underlying array directly, so make a new one } case v: BSV[Double] => if (v.index.length == v.used) { new SSV(v.length, v.index, v.data) } else { new SSV(v.length, v.index.slice(0, v.used), v.data.slice(0, v.used)) } case v: BV[_] => sys.error("Unsupported Breeze vector type: " + v.getClass.getName) } } def toBreezeConv[T: ClassTag](sv: SV)(implicit num: Numeric[T]): BV[T] = { val zero = num.zero implicit val conv: Array[Double] => Array[T] = (data) => { data.map(ele => (zero match { case zero: Double => ele case zero: Float => ele.toFloat case zero: Int => ele.toInt case zero: Long => ele.toLong }).asInstanceOf[T]).array } sv match { case SDV(data) => new BDV[T](data) case SSV(size, indices, values) => new BSV[T](indices, values, size)(Zero[T](zero)) } } def fromBreezeConv[T: ClassTag](breezeVector: BV[T])(implicit num: Numeric[T]): SV = { implicit val conv: Array[T] => Array[Double] = (data) => { data.map(num.toDouble).array } breezeVector match { case v: BDV[T] => if (v.offset == 0 && v.stride == 1 && v.length == v.data.length) { new SDV(v.data) } else { new SDV(v.toArray) // Can't use underlying array directly, so make a new one } case v: BSV[T] => if (v.index.length == v.used) { new SSV(v.length, v.index, v.data) } else { new SSV(v.length, v.index.slice(0, v.used), v.data.slice(0, v.used)) } case v: BV[T] => sys.error("Unsupported Breeze vector type: " + v.getClass.getName) } } def getFileSystem(conf: SparkConf, path: Path): FileSystem = { val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf) if (sys.env.contains("HADOOP_CONF_DIR") || sys.env.contains("YARN_CONF_DIR")) { val hdfsConfPath = if (sys.env.get("HADOOP_CONF_DIR").isDefined) { sys.env.get("HADOOP_CONF_DIR").get + "/core-site.xml" } else { sys.env.get("YARN_CONF_DIR").get + "/core-site.xml" } hadoopConf.addResource(new Path(hdfsConfPath)) } path.getFileSystem(hadoopConf) } def deleteChkptDirs(conf: SparkConf, dirs: Array[String]): Unit = { val fs = getFileSystem(conf, new Path(dirs(0))) dirs.foreach(dir => { fs.delete(new Path(dir), true) }) } }
Example 51
Source File: StatsActor.scala From coral with Apache License 2.0 | 5 votes |
package io.coral.actors.transform import akka.actor.{ActorLogging, Props} import org.json4s.JsonAST.JValue import org.json4s.JsonDSL._ import org.json4s._ import org.json4s.jackson.JsonMethods.render import io.coral.actors.{SimpleTimer, NoEmitTrigger, CoralActor} import io.coral.lib.SummaryStatistics import scala.language.implicitConversions object StatsActor { implicit val formats = org.json4s.DefaultFormats def getParams(json: JValue) = { for { field <- (json \ "params" \ "field").extractOpt[String] } yield { field } } def apply(json: JValue): Option[Props] = { getParams(json).map(_ => Props(classOf[StatsActor], json)) } } class StatsActor(json: JObject) extends CoralActor(json) with ActorLogging with NoEmitTrigger with SimpleTimer { implicit def double2jvalue(x: Double): JValue = if (x.isNaN) JNull else JDouble(x) val field = StatsActor.getParams(json).get val statistics = SummaryStatistics.mutable override def simpleTimer = { statistics.reset() Some(JNothing) } override def state = Map( ("count", render(statistics.count)), ("avg", render(statistics.average)), ("sd", render(statistics.populationSd)), ("min", render(statistics.min)), ("max", render(statistics.max)) ) override def noEmitTrigger(json: JObject) = { for { value <- (json \ field).extractOpt[Double] } yield { statistics.append(value) } } }
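The NaN-guarding conversion above appears to exist so that empty statistics (whose average is NaN) render as JSON null rather than an invalid NaN literal. A minimal sketch, assuming json4s on the classpath:

import org.json4s._
import scala.language.implicitConversions

object NaNDemo extends App {
  implicit def double2jvalue(x: Double): JValue =
    if (x.isNaN) JNull else JDouble(x)

  val beforeAnyData: JValue = Double.NaN // becomes JNull
  val average: JValue = 4.2              // becomes JDouble(4.2)
  println((beforeAnyData, average))
}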
Example 52
Source File: TlsIT.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.platform.sandbox import java.io.File import com.daml.bazeltools.BazelRunfiles._ import com.daml.ledger.api.testing.utils.SuiteResourceManagementAroundAll import com.daml.ledger.api.tls.TlsConfiguration import com.daml.ledger.client.LedgerClient import com.daml.ledger.client.configuration.{ CommandClientConfiguration, LedgerClientConfiguration, LedgerIdRequirement } import com.daml.platform.sandbox.config.SandboxConfig import com.daml.platform.sandbox.services.SandboxFixture import org.scalatest.AsyncWordSpec import scala.language.implicitConversions class TlsIT extends AsyncWordSpec with SandboxFixture with SuiteResourceManagementAroundAll { private val List( certChainFilePath, privateKeyFilePath, trustCertCollectionFilePath, clientCertChainFilePath, clientPrivateKeyFilePath) = { List("server.crt", "server.pem", "ca.crt", "client.crt", "client.pem").map { src => new File(rlocation("ledger/test-common/test-certificates/" + src)) } } private implicit def str2File(str: String): File = new File(str) private lazy val tlsEnabledConfig = LedgerClientConfiguration( "appId", LedgerIdRequirement.none, CommandClientConfiguration.default, TlsConfiguration( enabled = true, Some(clientCertChainFilePath), Some(clientPrivateKeyFilePath), Some(trustCertCollectionFilePath)).client ) override protected lazy val config: SandboxConfig = super.config.copy( tlsConfig = Some( TlsConfiguration( enabled = true, Some(certChainFilePath), Some(privateKeyFilePath), Some(trustCertCollectionFilePath)))) private lazy val clientF = LedgerClient.singleHost(serverHost, serverPort.value, tlsEnabledConfig) "A TLS-enabled server" should { "reject ledger queries when the client connects without tls" in { recoverToSucceededIf[io.grpc.StatusRuntimeException] { LedgerClient .singleHost(serverHost, serverPort.value, tlsEnabledConfig.copy(sslContext = None)) .flatMap(_.transactionClient.getLedgerEnd()) } } "serve ledger queries when the client presents a valid certificate" in { clientF.flatMap(_.transactionClient.getLedgerEnd()).map(_ => succeed) } } }
Example 53
Source File: ValueConversions.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.platform.participant.util import java.time.Instant import java.util.concurrent.TimeUnit import com.daml.ledger.api.v1.commands.{ Command, CreateCommand, ExerciseByKeyCommand, ExerciseCommand } import com.daml.ledger.api.v1.value.Value.Sum import com.daml.ledger.api.v1.value.Value.Sum.{ ContractId, Numeric, Int64, Party, Text, Timestamp, List => DamlListValue } import com.daml.ledger.api.v1.value.{Identifier, Record, RecordField, Value, List => DamlList} import scala.language.implicitConversions object ValueConversions { val unit = Value(Sum.Record(Record.defaultInstance)) implicit class StringValues(val s: String) extends AnyVal { def asParty: Value = Value(Party(s)) def asNumeric: Value = Value(Numeric(s)) def asText: Value = Value(Text(s)) def asContractId: Value = Value(ContractId(s)) } implicit class InstantValues(val i: Instant) extends AnyVal { def asTime: Value = { val micros = TimeUnit.SECONDS.toMicros(i.getEpochSecond) + TimeUnit.NANOSECONDS.toMicros( i.getNano.toLong) Value(Timestamp(micros)) } } implicit class BooleanValues(val b: Boolean) extends AnyVal { def asBoolean: Value = Value(Value.Sum.Bool(b)) } implicit class LongValues(val i: Long) extends AnyVal { def asInt64: Value = Value(Int64(i)) } implicit class LabeledValues(val labeledValues: Seq[(String, Value)]) extends AnyVal { def asRecord = Record(None, asRecordFields) def asRecordOf(identifier: Identifier) = Record(Some(identifier), asRecordFields) def asRecordValue = Value(Value.Sum.Record(asRecord)) def asRecordValueOf(identifier: Identifier) = Value(Value.Sum.Record(asRecordOf(identifier))) def asRecordFields: Seq[RecordField] = { labeledValues.map { case (k, v) => RecordField(k, Some(v)) } } } implicit class ValueSequences(val values: Seq[Value]) extends AnyVal { def asList = Value(DamlListValue(DamlList(values))) } implicit def value2Optional(value: Value): Option[Value] = Some(value) implicit class ExerciseCommands(val exercise: ExerciseCommand) extends AnyVal { def wrap = Command(Command.Command.Exercise(exercise)) } implicit class ExerciseByKeyCommands(val exerciseByKey: ExerciseByKeyCommand) extends AnyVal { def wrap = Command(Command.Command.ExerciseByKey(exerciseByKey)) } implicit class CreateCommands(val create: CreateCommand) extends AnyVal { def wrap = Command(Command.Command.Create(create)) } }
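A hedged usage sketch of the enrichments above, assuming the daml ledger-api value classes on the classpath:

import com.daml.platform.participant.util.ValueConversions._

object RecordDemo {
  val payload = Seq(
    "owner"  -> "Alice".asParty,
    "amount" -> "42.0".asNumeric
  ).asRecordValue // a Value wrapping a two-field Record
}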
Example 54
Source File: ValueConversions.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.extractor.helpers import java.time.Instant import java.util.concurrent.TimeUnit import com.daml.ledger.api.v1.commands.{Command, CreateCommand, ExerciseCommand} import com.daml.ledger.api.v1.value.Value.Sum.{ Numeric, Int64, Party, Text, Timestamp, List => DamlListValue } import com.daml.ledger.api.v1.value.{Identifier, Record, RecordField, Value, List => DamlList} import scala.language.implicitConversions object ValueConversions { implicit class StringValues(val s: String) extends AnyVal { def asParty: Value = Value(Party(s)) def asNumeric: Value = Value(Numeric(s)) def asText: Value = Value(Text(s)) } implicit class InstantValues(val i: Instant) extends AnyVal { def asTime: Value = { val micros = TimeUnit.SECONDS.toMicros(i.getEpochSecond) + TimeUnit.NANOSECONDS.toMicros( i.getNano.toLong) Value(Timestamp(micros)) } } implicit class LongValues(val i: Long) extends AnyVal { def asInt64: Value = Value(Int64(i)) } implicit class LabeledValues(val labeledValues: Seq[(String, Value)]) extends AnyVal { def asRecord = Record(None, recordFields) def asRecordOf(identifier: Identifier) = Record(Some(identifier), recordFields) def asRecordValue = Value(Value.Sum.Record(asRecord)) def asRecordValueOf(identifier: Identifier) = Value(Value.Sum.Record(asRecordOf(identifier))) private def recordFields: Seq[RecordField] = { labeledValues.map { case (k, v) => RecordField(k, Some(v)) } } } implicit class ValueSequences(val values: Seq[Value]) extends AnyVal { def asList = Value(DamlListValue(DamlList(values))) } implicit def value2Optional(value: Value): Option[Value] = Some(value) implicit class ExerciseCommands(val exercise: ExerciseCommand) extends AnyVal { def wrap = Command(Command.Command.Exercise(exercise)) } implicit class CreateCommands(val create: CreateCommand) extends AnyVal { def wrap = Command(Command.Command.Create(create)) } }
Example 55
Source File: DamlLfEncoderTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.lf.testing.archive import java.io.File import com.daml.bazeltools.BazelRunfiles import com.daml.lf.archive.{Dar, UniversalArchiveReader} import com.daml.lf.data.Ref.{DottedName, PackageId} import com.daml.daml_lf_dev.DamlLf import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.{Matchers, WordSpec} import scala.collection.JavaConverters._ import scala.language.implicitConversions class DamlLfEncoderTest extends WordSpec with Matchers with TableDrivenPropertyChecks with BazelRunfiles { "dar generated by encoder" should { "be readable" in { val modules_1_0 = Set[DottedName]( "UnitMod", "BoolMod", "Int64Mod", "TextMod", "DecimalMod", "DateMod", "TimestampMod", "ListMod", "PartyMod", "RecordMod", "VariantMod", "BuiltinMod", "TemplateMod", ) val modules_1_1 = modules_1_0 + "OptionMod" val modules_1_3 = modules_1_1 + "TextMapMod" val modules_1_6 = modules_1_3 + "EnumMod" val modules_1_7 = modules_1_6 + "NumericMod" val modules_1_8 = modules_1_7 + "SynonymMod" val modules_1_dev = modules_1_8 + "GenMapMod" val versions = Table( "versions" -> "modules", "1.0" -> modules_1_0, "1.1" -> modules_1_1, "1.3" -> modules_1_3, "1.6" -> modules_1_6, "1.7" -> modules_1_7, "1.8" -> modules_1_8, "1.dev" -> modules_1_dev ) forEvery(versions) { (version, expectedModules) => val dar = UniversalArchiveReader() .readFile(new File(rlocation(s"daml-lf/encoder/test-$version.dar"))) dar shouldBe 'success val findModules = dar.toOption.toList.flatMap(getModules).toSet findModules shouldBe expectedModules } } } private val preInternalizationVersions = List.range(0, 7).map(_.toString).toSet private def getModules(dar: Dar[(PackageId, DamlLf.ArchivePayload)]) = { for { pkgWithId <- dar.main +: dar.dependencies (_, pkg) = pkgWithId version = pkg.getMinor internedStrings = pkg.getDamlLf1.getInternedStringsList.asScala.toArray dottedNames = pkg.getDamlLf1.getInternedDottedNamesList.asScala.map( _.getSegmentsInternedStrList.asScala.map(internedStrings(_)) ) segments <- pkg.getDamlLf1.getModulesList.asScala.map( mod => if (preInternalizationVersions(version)) mod.getNameDname.getSegmentsList.asScala else dottedNames(mod.getNameInternedDname) ) } yield DottedName.assertFromSegments(segments) } private implicit def toDottedName(s: String): DottedName = DottedName.assertFromString(s) }
Example 56
Source File: PreprocessorSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.lf package engine package preprocessing import com.daml.lf.data._ import com.daml.lf.language.Ast.{TNat, TTyCon} import com.daml.lf.language.Util._ import com.daml.lf.testing.parser.Implicits._ import com.daml.lf.value.Value._ import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.{Matchers, WordSpec} import scala.language.implicitConversions class PreprocessorSpec extends WordSpec with Matchers with TableDrivenPropertyChecks { import defaultParserParameters.{defaultPackageId => pkgId} private implicit def toName(s: String): Ref.Name = Ref.Name.assertFromString(s) val recordCon = Ref.Identifier(pkgId, Ref.QualifiedName.assertFromString("Module:Record")) val variantCon = Ref.Identifier(pkgId, Ref.QualifiedName.assertFromString("Module:Variant")) val enumCon = Ref.Identifier(pkgId, Ref.QualifiedName.assertFromString("Module:Enum")) val pkg = p""" module Module { record Record = { field : Int64 }; variant Variant = variant1 : Text | variant2 : Int64 ; enum Enum = value1 | value2; } """ "translateValue" should { val testCases = Table( "type" -> "value", TUnit -> ValueUnit, TBool -> ValueTrue, TInt64 -> ValueInt64(42), TTimestamp -> ValueTimestamp(Time.Timestamp.assertFromString("1969-07-20T20:17:00Z")), TDate -> ValueDate(Time.Date.assertFromString("1879-03-14")), TText -> ValueText("daml"), TNumeric(TNat(Decimal.scale)) -> ValueNumeric(Numeric.assertFromString("10.0000000000")), // TNumeric(TNat(9)) -> // ValueNumeric(Numeric.assertFromString("9.000000000")), TParty -> ValueParty(Ref.Party.assertFromString("Alice")), TContractId(TTyCon(recordCon)) -> ValueContractId(ContractId.assertFromString("#contractId")), TList(TText) -> ValueList(FrontStack(ValueText("a"), ValueText("b"))), TTextMap(TBool) -> ValueTextMap(SortedLookupList(Map("0" -> ValueTrue, "1" -> ValueFalse))), TOptional(TText) -> ValueOptional(Some(ValueText("text"))), TTyCon(recordCon) -> ValueRecord(None, ImmArray(Some[Ref.Name]("field") -> ValueInt64(33))), TTyCon(variantCon) -> ValueVariant(None, "variant1", ValueText("some test")), TTyCon(enumCon) -> ValueEnum(None, "value1"), ) val compiledPackage = ConcurrentCompiledPackages() assert(compiledPackage.addPackage(pkgId, pkg) == ResultDone.Unit) val preprocessor = new Preprocessor(compiledPackage) import preprocessor.translateValue "succeeds on well type values" in { forAll(testCases) { (typ, value) => translateValue(typ, value) shouldBe a[ResultDone[_]] } } "fails on non-well type values" in { forAll(testCases) { (typ1, value1) => forAll(testCases) { (_, value2) => if (value1 != value2) translateValue(typ1, value2) shouldBe a[ResultError] } } } } }
Example 57
Source File: LanguageMinorVersion.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.lf.language sealed abstract class LanguageMinorVersion extends Product with Serializable { import LanguageMinorVersion._ def toProtoIdentifier: String = this match { case Stable(id) => id case Dev => "dev" } } object LanguageMinorVersion { final case class Stable(identifier: String) extends LanguageMinorVersion case object Dev extends LanguageMinorVersion def fromProtoIdentifier(identifier: String): LanguageMinorVersion = identifier match { case "dev" => Dev case _ => Stable(identifier) } object Implicits { import scala.language.implicitConversions implicit def `LMV from proto identifier`(identifier: String): LanguageMinorVersion = fromProtoIdentifier(identifier) } }
Example 58
Source File: SourceContext.scala From nanotest-strawman with Apache License 2.0 | 5 votes |
package verify package sourcecode abstract class SourceValue[T] { def value: T } abstract class SourceCompanion[T, V <: SourceValue[T]](build: T => V) { import scala.language.implicitConversions def apply()(implicit s: V): T = s.value implicit def toScalaVerifySourcecodeSourceValue(s: T): V = build(s) } case class Name(value: String) extends SourceValue[String] object Name extends SourceCompanion[String, Name](new Name(_)) with NameMacros { case class Machine(value: String) extends SourceValue[String] object Machine extends SourceCompanion[String, Machine](new Machine(_)) with NameMachineMacros } case class FullName(value: String) extends SourceValue[String] object FullName extends SourceCompanion[String, FullName](new FullName(_)) with FullNameMacros { case class Machine(value: String) extends SourceValue[String] object Machine extends SourceCompanion[String, Machine](new Machine(_)) with FullNameMachineMacros } case class SourceFileName(value: String) extends SourceValue[String] object SourceFileName extends SourceCompanion[String, SourceFileName](new SourceFileName(_)) with SourceFileNameMacros case class SourceFilePath(value: String) extends SourceValue[String] object SourceFilePath extends SourceCompanion[String, SourceFilePath](new SourceFilePath(_)) with SourceFilePathMacros case class Line(value: Int) extends SourceValue[Int] object Line extends SourceCompanion[Int, Line](new Line(_)) with LineMacros case class SourceLocation(fileName: String, filePath: String, line: Int) object SourceLocation { implicit def toScalaVerifySourcecodeSourceLocation( implicit n: SourceFileName, p: SourceFilePath, l: Line ): SourceLocation = SourceLocation(n.value, p.value, l.value) def apply()(implicit s: SourceLocation): SourceLocation = s } case class Enclosing(value: String) extends SourceValue[String] object Enclosing extends SourceCompanion[String, Enclosing](new Enclosing(_)) with EnclosingMacros { case class Machine(value: String) extends SourceValue[String] object Machine extends SourceCompanion[String, Machine](new Machine(_)) with EnclosingMachineMacros } case class Pkg(value: String) extends SourceValue[String] object Pkg extends SourceCompanion[String, Pkg](new Pkg(_)) with PkgMacros case class Text[T](value: T, source: String) object Text extends TextMacros
Example 59
Source File: SourceContext.scala From nanotest-strawman with Apache License 2.0 | 5 votes |
package verify package sourcecode private[sourcecode] trait SourceValue[T]{ def value: T } private[sourcecode] trait SourceCompanion[T, V <: SourceValue[T]](build: T => V){ def apply()(using V): T = summon[V].value } case class Name(value: String) extends SourceValue[String] object Name extends NameMacros with SourceCompanion[String, Name](new Name(_)) { case class Machine(value: String) extends SourceValue[String] object Machine extends NameMachineMacros with SourceCompanion[String, Machine](new Machine(_)) } case class FullName(value: String) extends SourceValue[String] object FullName extends FullNameMacros with SourceCompanion[String, FullName](new FullName(_)) { case class Machine(value: String) extends SourceValue[String] object Machine extends FullNameMachineMacros with SourceCompanion[String, Machine](new Machine(_)) } case class SourceFileName(value: String) extends SourceValue[String] object SourceFileName extends SourceFileNameMacros with SourceCompanion[String, SourceFileName](new SourceFileName(_)) case class SourceFilePath(value: String) extends SourceValue[String] object SourceFilePath extends SourceFilePathMacros with SourceCompanion[String, SourceFilePath](new SourceFilePath(_)) case class Line(value: Int) extends SourceValue[Int] object Line extends LineMacros with SourceCompanion[Int, Line](new Line(_)) case class SourceLocation(fileName: String, filePath: String, line: Int) object SourceLocation { import scala.language.implicitConversions implicit def toScalaVerifySourcecodeSourceLocation(implicit n: SourceFileName, p: SourceFilePath, l: Line): SourceLocation = SourceLocation(n.value, p.value, l.value) def apply()(using SourceLocation): SourceLocation = summon[SourceLocation] } case class Enclosing(value: String) extends SourceValue[String] object Enclosing extends EnclosingMacros with SourceCompanion[String, Enclosing](new Enclosing(_)) { case class Machine(value: String) extends SourceValue[String] object Machine extends EnclosingMachineMacros with SourceCompanion[String, Machine](new Machine(_)) } case class Pkg(value: String) extends SourceValue[String] object Pkg extends PkgMacros with SourceCompanion[String, Pkg](new Pkg(_)) case class Text[T](value: T, source: String) object Text extends TextMacros
Example 60
Source File: Value.scala From scarango with MIT License | 5 votes |
package com.outr.arango import io.circe.Json import scala.language.implicitConversions case class Value(json: Json) object Value { implicit def string(value: String): Value = if (value != null) Value(Json.fromString(value)) else Value(Json.Null) implicit def string(value: Option[String]): Value = Value(value.map(Json.fromString).getOrElse(Json.Null)) implicit def boolean(value: Boolean): Value = Value(Json.fromBoolean(value)) implicit def boolean(value: Option[Boolean]): Value = Value(value.map(Json.fromBoolean).getOrElse(Json.Null)) implicit def int(value: Int): Value = Value(Json.fromInt(value)) implicit def int(value: Option[Int]): Value = Value(value.map(Json.fromInt).getOrElse(Json.Null)) implicit def long(value: Long): Value = Value(Json.fromLong(value)) implicit def long(value: Option[Long]): Value = Value(value.map(Json.fromLong).getOrElse(Json.Null)) implicit def double(value: Double): Value = Value(Json.fromDouble(value).get) implicit def double(value: Option[Double]): Value = Value(value.map(Json.fromDouble(_).get).getOrElse(Json.Null)) implicit def bigDecimal(value: BigDecimal): Value = Value(Json.fromBigDecimal(value)) implicit def bigDecimal(value: Option[BigDecimal]): Value = Value(value.map(Json.fromBigDecimal).getOrElse(Json.Null)) implicit def values(values: Seq[Value]): Value = Value(Json.arr(values.map(_.json): _*)) implicit def strings(value: Seq[String]): Value = conv[String](value, string) implicit def booleans(value: Seq[Boolean]): Value = conv[Boolean](value, boolean) implicit def ints(value: Seq[Int]): Value = conv[Int](value, int) implicit def longs(value: Seq[Long]): Value = conv[Long](value, long) implicit def doubles(value: Seq[Double]): Value = conv[Double](value, double) implicit def bigDecimals(value: Seq[BigDecimal]): Value = conv[BigDecimal](value, bigDecimal) implicit def id[T](value: Id[T]): Value = string(value._id) implicit def json(value: Json): Value = Value(value) private def conv[T](seq: Seq[T], converter: T => Value): Value = { val values = seq.toList.map(converter).map(_.json) Value(Json.arr(values: _*)) } }
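A hedged usage sketch of the conversions above, assuming circe and this file on the classpath. Because the expected type drives implicit conversion, ordinary literals can populate a Map[String, Value] directly:

import com.outr.arango.Value

object BindDemo {
  val bindVars: Map[String, Value] =
    Map("name" -> "Alice", "age" -> 30, "active" -> true)
}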
Example 61
Source File: Interpolation.scala From slick-jdbc-extension-scala with MIT License | 5 votes |
package com.github.tarao package slickjdbc package interpolation import scala.language.implicitConversions import slick.jdbc.SQLActionBuilder import slick.sql.SqlAction import slick.dbio.{NoStream, Effect} trait SQLInterpolation { implicit def interpolation(s: StringContext) = SQLInterpolationImpl(s) } object SQLInterpolation extends SQLInterpolation case class SQLInterpolationImpl(s: StringContext) extends AnyVal { import scala.language.experimental.macros def sql(param: Any*): SQLActionBuilder = macro MacroTreeBuilder.sqlImpl def sqlu(param: Any*): SqlAction[Int, NoStream, Effect] = macro MacroTreeBuilder.sqluImpl } trait Literal class SimpleString(value: String) extends Literal { override def toString = value } case class TableName(name: String) extends SimpleString(name)
Example 62
Source File: TestDB.scala From slick-jdbc-extension-scala with MIT License | 5 votes |
package com.github.tarao package slickjdbc package helper import scala.language.implicitConversions import scala.concurrent.duration.Duration import org.scalatest.{FunSpec, BeforeAndAfterAll, BeforeAndAfterEach} import slick.jdbc.H2Profile.api.Database case class Timeout(duration: Duration) object Timeout { implicit val forever: Timeout = Timeout(Duration.Inf) } class DBRunner(val db: Database) { import scala.concurrent.{Future, Await} import slick.driver.H2Driver.api.Database import slick.dbio.{DBIOAction, NoStream, Effect} def run[R](a: DBIOAction[R, NoStream, Nothing])(implicit timeout: Timeout ): R = Await.result(db.run(a), timeout.duration) def close = db.close } object FreshId { var id = 0 def apply() = { id = max; id } def max = { id + 1 } } trait Repository { def db: DBRunner } trait TestDB extends BeforeAndAfterAll with BeforeAndAfterEach { self: FunSpec => lazy val config = { import com.typesafe.config.{ConfigFactory, ConfigValueFactory => V} import slick.jdbc.JdbcDataSource // Rewrite database name to thread local one so that writing from // multiple test threads run parallel won't conflict each other. val c = ConfigFactory.load.getConfig("h2memtest") val name = "test" + Thread.currentThread.getId val url = c.getString("url").replaceFirst("""\btest\b""", name) c.withValue("url", V.fromAnyRef(url)) } lazy val db = new DBRunner(Database.forConfig("", config)) override def beforeAll = { import slick.driver.H2Driver.api._ db.run { sqlu""" CREATE TABLE IF NOT EXISTS entry ( entry_id BIGINT NOT NULL PRIMARY KEY, url VARCHAR(2048) NOT NULL UNIQUE ) """ } db.run { sqlu""" CREATE TABLE IF NOT EXISTS ids ( id BIGINT NOT NULL PRIMARY KEY ) """ } super.beforeAll } override def afterAll = { db.close super.afterAll } }
Example 63
Source File: VertexPartition.scala From drizzle-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.graphx.impl

import scala.reflect.ClassTag

import org.apache.spark.graphx._
import org.apache.spark.util.collection.BitSet

private[graphx] object VertexPartition {

  private[graphx] class VertexPartition[VD: ClassTag](
      val index: VertexIdToIndexMap,
      val values: Array[VD],
      val mask: BitSet)
    extends VertexPartitionBase[VD]

  private[graphx] class VertexPartitionOps[VD: ClassTag](self: VertexPartition[VD])
    extends VertexPartitionBaseOps[VD, VertexPartition](self) {

    def withIndex(index: VertexIdToIndexMap): VertexPartition[VD] = {
      new VertexPartition(index, self.values, self.mask)
    }

    def withValues[VD2: ClassTag](values: Array[VD2]): VertexPartition[VD2] = {
      new VertexPartition(self.index, values, self.mask)
    }

    def withMask(mask: BitSet): VertexPartition[VD] = {
      new VertexPartition(self.index, self.values, mask)
    }
  }
}
Example 64
Source File: StreamingTestMethod.scala From drizzle-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.mllib.stat.test import java.io.Serializable import scala.language.implicitConversions import scala.math.pow import com.twitter.chill.MeatLocker import org.apache.commons.math3.stat.descriptive.StatisticalSummaryValues import org.apache.commons.math3.stat.inference.TTest import org.apache.spark.internal.Logging import org.apache.spark.streaming.dstream.DStream import org.apache.spark.util.StatCounter private[stat] object StreamingTestMethod { // Note: after new `StreamingTestMethod`s are implemented, please update this map. private final val TEST_NAME_TO_OBJECT: Map[String, StreamingTestMethod] = Map( "welch" -> WelchTTest, "student" -> StudentTTest) def getTestMethodFromName(method: String): StreamingTestMethod = TEST_NAME_TO_OBJECT.get(method) match { case Some(test) => test case None => throw new IllegalArgumentException( "Unrecognized method name. Supported streaming test methods: " + TEST_NAME_TO_OBJECT.keys.mkString(", ")) } }
Example 65
Source File: StatusTrackerSuite.scala From drizzle-spark with Apache License 2.0 | 5 votes |
package org.apache.spark import scala.concurrent.duration._ import scala.language.implicitConversions import scala.language.postfixOps import org.scalatest.Matchers import org.scalatest.concurrent.Eventually._ import org.apache.spark.JobExecutionStatus._ class StatusTrackerSuite extends SparkFunSuite with Matchers with LocalSparkContext { test("basic status API usage") { sc = new SparkContext("local", "test", new SparkConf(false)) val jobFuture = sc.parallelize(1 to 10000, 2).map(identity).groupBy(identity).collectAsync() val jobId: Int = eventually(timeout(10 seconds)) { val jobIds = jobFuture.jobIds jobIds.size should be(1) jobIds.head } val jobInfo = eventually(timeout(10 seconds)) { sc.statusTracker.getJobInfo(jobId).get } jobInfo.status() should not be FAILED val stageIds = jobInfo.stageIds() stageIds.size should be(2) val firstStageInfo = eventually(timeout(10 seconds)) { sc.statusTracker.getStageInfo(stageIds(0)).get } firstStageInfo.stageId() should be(stageIds(0)) firstStageInfo.currentAttemptId() should be(0) firstStageInfo.numTasks() should be(2) eventually(timeout(10 seconds)) { val updatedFirstStageInfo = sc.statusTracker.getStageInfo(stageIds(0)).get updatedFirstStageInfo.numCompletedTasks() should be(2) updatedFirstStageInfo.numActiveTasks() should be(0) updatedFirstStageInfo.numFailedTasks() should be(0) } } test("getJobIdsForGroup()") { sc = new SparkContext("local", "test", new SparkConf(false)) // Passing `null` should return jobs that were not run in a job group: val defaultJobGroupFuture = sc.parallelize(1 to 1000).countAsync() val defaultJobGroupJobId = eventually(timeout(10 seconds)) { defaultJobGroupFuture.jobIds.head } eventually(timeout(10 seconds)) { sc.statusTracker.getJobIdsForGroup(null).toSet should be (Set(defaultJobGroupJobId)) } // Test jobs submitted in job groups: sc.setJobGroup("my-job-group", "description") sc.statusTracker.getJobIdsForGroup("my-job-group") should be (Seq.empty) val firstJobFuture = sc.parallelize(1 to 1000).countAsync() val firstJobId = eventually(timeout(10 seconds)) { firstJobFuture.jobIds.head } eventually(timeout(10 seconds)) { sc.statusTracker.getJobIdsForGroup("my-job-group") should be (Seq(firstJobId)) } val secondJobFuture = sc.parallelize(1 to 1000).countAsync() val secondJobId = eventually(timeout(10 seconds)) { secondJobFuture.jobIds.head } eventually(timeout(10 seconds)) { sc.statusTracker.getJobIdsForGroup("my-job-group").toSet should be ( Set(firstJobId, secondJobId)) } } test("getJobIdsForGroup() with takeAsync()") { sc = new SparkContext("local", "test", new SparkConf(false)) sc.setJobGroup("my-job-group2", "description") sc.statusTracker.getJobIdsForGroup("my-job-group2") shouldBe empty val firstJobFuture = sc.parallelize(1 to 1000, 1).takeAsync(1) val firstJobId = eventually(timeout(10 seconds)) { firstJobFuture.jobIds.head } eventually(timeout(10 seconds)) { sc.statusTracker.getJobIdsForGroup("my-job-group2") should be (Seq(firstJobId)) } } test("getJobIdsForGroup() with takeAsync() across multiple partitions") { sc = new SparkContext("local", "test", new SparkConf(false)) sc.setJobGroup("my-job-group2", "description") sc.statusTracker.getJobIdsForGroup("my-job-group2") shouldBe empty val firstJobFuture = sc.parallelize(1 to 1000, 2).takeAsync(999) val firstJobId = eventually(timeout(10 seconds)) { firstJobFuture.jobIds.head } eventually(timeout(10 seconds)) { sc.statusTracker.getJobIdsForGroup("my-job-group2") should have size 2 } } }
Example 66
Source File: TFTensorNumeric.scala From BigDL with Apache License 2.0 | 5 votes |
package com.intel.analytics.bigdl.utils.tf import com.google.protobuf.ByteString import com.intel.analytics.bigdl.tensor.{ConvertableFrom, StringType, TensorDataType} import com.intel.analytics.bigdl.tensor.TensorNumericMath.UndefinedTensorNumeric import scala.language.implicitConversions object TFTensorNumeric { implicit object NumericByteString extends UndefinedTensorNumeric[ByteString]("ByteString") { override def getType(): TensorDataType = StringType override def plus(x: ByteString, y: ByteString): ByteString = x.concat(y) override def fromType[K](k: K)(implicit c: ConvertableFrom[K]): ByteString = { ByteString.copyFromUtf8(k.toString) } override def axpy(n: Int, da: ByteString, dx: Array[ByteString], _dx_offset: Int, incx: Int, dy: Array[ByteString], _dy_offset: Int, incy: Int): Unit = { var i = 0 while (i < n) { dy(i + _dy_offset) = dx(_dx_offset + i).concat(dy(_dy_offset + i)) i += 1 } } override def nearlyEqual(a: ByteString, b: ByteString, epsilon: Double): Boolean = { a == b } } }
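A small sketch exercising the ByteString numeric above (string "addition" is concatenation):

import com.google.protobuf.ByteString
import com.intel.analytics.bigdl.utils.tf.TFTensorNumeric.NumericByteString

val a = ByteString.copyFromUtf8("foo")
val b = ByteString.copyFromUtf8("bar")
NumericByteString.plus(a, b) // ByteString for "foobar"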
Example 67
Source File: package.scala From BigDL with Apache License 2.0 | 5 votes |
package com.intel.analytics

import java.util.Properties

import com.intel.analytics.bigdl.dataset.AbstractDataSet
import com.intel.analytics.bigdl.nn.abstractnn.Activity

import scala.language.implicitConversions

package object bigdl {
  type Module[T] =
    com.intel.analytics.bigdl.nn.abstractnn.AbstractModule[Activity, Activity, T]
  type Criterion[T] =
    com.intel.analytics.bigdl.nn.abstractnn.AbstractCriterion[Activity, Activity, T]

  implicit def convModule[T](
    module: com.intel.analytics.bigdl.nn.abstractnn.AbstractModule[_, _, T]
  ): Module[T] = module.asInstanceOf[Module[T]]

  implicit def convCriterion[T](
    criterion: com.intel.analytics.bigdl.nn.abstractnn.AbstractCriterion[_, _, T]
  ): Criterion[T] = criterion.asInstanceOf[Criterion[T]]

  val numeric = com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric

  type DataSet[D] = AbstractDataSet[D, _]

  private object BigDLBuildInfo {

    val version: String = {
      val resourceStream = Thread.currentThread().getContextClassLoader.
        getResourceAsStream("bigdl-version-info.properties")
      try {
        val unknownProp = "<unknown>"
        val props = new Properties()
        props.load(resourceStream)
        props.getProperty("version", unknownProp)
      } catch {
        case npe: NullPointerException =>
          throw new IllegalArgumentException(
            "Error while locating file bigdl-version-info.properties")
        case e: Exception =>
          throw new IllegalArgumentException(
            "Error loading properties from bigdl-version-info.properties")
      } finally {
        if (resourceStream != null) {
          try {
            resourceStream.close()
          } catch {
            case e: Exception =>
              throw new IllegalArgumentException(
                "Error closing bigdl build info resource stream", e)
          }
        }
      }
    }
  }

  val BIGDL_VERSION = BigDLBuildInfo.version
}
Example 68
Source File: RxPromise.scala From scalajs-rxjs with MIT License | 5 votes |
// Project: scalajs-rxjs // Module: RxPromise // Description: Provides an extension of js.Promise with simplified event handling // Copyright (c) 2016. Distributed under the MIT License (see included LICENSE file). package rxjs import scala.scalajs.js import scala.scalajs.js.annotation.{JSGlobal, JSName} import scala.language.implicitConversions @js.native trait RxPromise[T] extends js.Promise[T] { @JSName("then") def andThen[R](onFulfilled: js.Function1[T,R]): RxPromise[R] = js.native @JSName("then") def andThen[R](onFulfilled: js.Function1[T,R], onRejected: js.UndefOr[js.Function1[js.Any,R]]): RxPromise[R] = js.native @JSName("catch") def orCatch(onError: js.Function1[js.Any,_]): RxPromise[T] = js.native } object RxPromise { @inline implicit def toObservable[T](p: RxPromise[T]): Observable[T] = Observable.fromPromise(p) type ResolveFun[T] = Function1[T,js.Promise[T]] type RejectFun = Function1[Any,js.Promise[Nothing]] @JSGlobal("Promise") @js.native private class Impl[T](executor: js.Function2[js.Function1[T,Unit],js.Function1[Any,Unit],Any]) extends js.Object def apply[T](executor: Function2[js.Function1[T,Unit],js.Function1[Any,Unit],Any]): RxPromise[T] = new Impl(executor).asInstanceOf[RxPromise[T]] def resolve[T](value: T): RxPromise[T] = js.Promise.resolve[T](value).asInstanceOf[RxPromise[T]] def reject(reason: Any): RxPromise[Nothing] = js.Promise.reject(reason).asInstanceOf[RxPromise[Nothing]] implicit final class RichRxPromise[T](val p: RxPromise[T]) extends AnyVal { @inline def map[R](f: T=>R): RxPromise[R] = p.andThen(f) @inline @deprecated("Use map() instead","0.0.2") def onFulfilled[R](f: T=>R): RxPromise[R] = p.andThen(f) @inline def onError(f: js.Any=>_): RxPromise[T] = p.orCatch(f) } }
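A hedged usage sketch of the promise wrapper above (Scala.js, using only names from this file):

import rxjs.RxPromise

val p = RxPromise[Int]((resolve, _) => resolve(42))
val q = p.map(_ + 1).onError(err => println(s"failed: $err"))
// and, via the implicit toObservable:
// val obs: Observable[Int] = q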
Example 69
Source File: JavaAgent.scala From sbt-javaagent with Apache License 2.0 | 5 votes |
package com.lightbend.sbt.javaagent

import sbt._
import sbt.Keys._

// The enclosing plugin object was clipped in this excerpt; its name and parent
// follow from the overrides below. The AgentScope/AgentModule/ResolvedAgent case
// classes, the javaAgents/resolvedJavaAgents keys and the AgentConfig
// configuration it references are defined in the elided part of the file.
object JavaAgent extends AutoPlugin {

  def apply(module: ModuleID, name: String = null, scope: AgentScope = AgentScope(), arguments: String = null): AgentModule = {
    val agentName = Option(name).getOrElse(module.name)
    val agentArguments = Option(arguments).map("=" + _).getOrElse("")
    val confs = module.configurations.toSeq.flatMap(_.split(";"))
    val inCompile = scope.compile || confs.contains(Compile.name) || confs.contains(Provided.name)
    val inRun = scope.run || inCompile || confs.contains(Runtime.name)
    val inTest = scope.test || confs.contains(Test.name)
    val inDist = scope.dist
    val configuration = if (inCompile) Provided else AgentConfig
    val reconfiguredModule = Modules.withConfigurations(module, Some(configuration.name))
    val configuredScope = AgentScope(compile = inCompile, test = inTest, run = inRun, dist = inDist)
    AgentModule(agentName, reconfiguredModule, configuredScope, agentArguments)
  }

  override def requires = plugins.JvmPlugin

  override def projectSettings = Seq(
    javaAgents := Seq.empty,
    ivyConfigurations += AgentConfig,
    libraryDependencies ++= javaAgents.value.map(_.module),
    resolvedJavaAgents := resolveAgents.value,
    fork in run := enableFork(fork in run, _.scope.run).value,
    connectInput in run := enableFork(fork in run, _.scope.run).value,
    fork in Test := enableFork(fork in Test, _.scope.test).value,
    javaOptions in run ++= agentOptions(_.agent.scope.run).value,
    javaOptions in Test ++= agentOptions(_.agent.scope.test).value,
    fullClasspath in Test := filterAgents((fullClasspath in Test).value, resolvedJavaAgents.value)
  )

  private def resolveAgents = Def.task[Seq[ResolvedAgent]] {
    javaAgents.value flatMap { agent =>
      update.value.matching(Modules.exactFilter(agent.module)).headOption map { jar =>
        ResolvedAgent(agent, jar)
      }
    }
  }

  private def enableFork(forkKey: SettingKey[Boolean], enabled: AgentModule => Boolean) = Def.setting[Boolean] {
    forkKey.value || javaAgents.value.exists(enabled)
  }

  private def agentOptions(enabled: ResolvedAgent => Boolean) = Def.task[Seq[String]] {
    resolvedJavaAgents.value filter enabled map { resolved =>
      "-javaagent:" + resolved.artifact.absolutePath + resolved.agent.arguments
    }
  }

  def filterAgents(classpath: Classpath, resolvedAgents: Seq[ResolvedAgent]): Classpath = {
    val agents = resolvedAgents.map(resolved => resolved.artifact.absolutePath)
    classpath.filter(aFile => !agents.contains(aFile.data.getAbsolutePath))
  }
}
Example 70
Source File: Implicits.scala From chronoscala with MIT License | 5 votes |
package jp.ne.opt.chronoscala import java.time._ import jp.ne.opt.chronoscala.Tag.CS import scala.language.implicitConversions trait Implicits extends IntImplicits with DurationImplicits with TimeImplicits with OrderingImplicits trait NamespacedImplicits extends NamespacedIntImplicits with NamespacedLongImplicits with DurationImplicits with TimeImplicits with OrderingImplicits trait IntImplicits { implicit def richInt(n: Int): RichInt = new RichInt(n) } trait NamespacedIntImplicits { implicit def richIntCs(n: Int): RichAny[Int] = new RichAny(n) implicit def richCsInt(n: CS[Int]): RichInt = new RichInt(n) } trait NamespacedLongImplicits { implicit def richLongCs(n: Long): RichAny[Long] = new RichAny(n) implicit def richCsLong(n: CS[Long]): RichLong = new RichLong(n) } trait DurationImplicits { implicit def richDuration(d: Duration): RichDuration = new RichDuration(d) implicit def richPeriod(p: Period): RichPeriod = new RichPeriod(p) } trait TimeImplicits { implicit def richZonedDateTime(t: ZonedDateTime): RichZonedDateTime = new RichZonedDateTime(t) implicit def richOffsetDateTime(t: OffsetDateTime): RichOffsetDateTime = new RichOffsetDateTime(t) implicit def richLocalDateTime(t: LocalDateTime): RichLocalDateTime = new RichLocalDateTime(t) implicit def richLocalTime(t: LocalTime): RichLocalTime = new RichLocalTime(t) implicit def richLocalDate(t: LocalDate): RichLocalDate = new RichLocalDate(t) implicit def richInstant(i: Instant): RichInstant = new RichInstant(i) } trait OrderingImplicits { implicit val zonedDateTimeOrdering: Ordering[ZonedDateTime] = Ordering.fromLessThan(_ isBefore _) implicit val offsetDateTimeOrdering: Ordering[OffsetDateTime] = Ordering.fromLessThan(_ isBefore _) implicit val localDateTimeOrdering: Ordering[LocalDateTime] = Ordering.fromLessThan(_ isBefore _) implicit val localDateOrdering: Ordering[LocalDate] = Ordering.fromLessThan(_ isBefore _) implicit val localTimeOrdering: Ordering[LocalTime] = Ordering.fromLessThan(_ isBefore _) implicit val instantOrdering: Ordering[Instant] = Ordering.fromLessThan(_ isBefore _) }
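A minimal sketch of the orderings provided above (the mixing object is made up; the library presumably exposes one of its own):

object Chrono extends jp.ne.opt.chronoscala.Implicits
import Chrono._
import java.time.Instant

List(Instant.now, Instant.EPOCH).sorted // uses instantOrdering; EPOCH sorts first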
Example 71
Source File: implicits.scala From embulk-output-s3_parquet with MIT License | 5 votes |
package org.embulk.output.s3_parquet import java.util.{Optional, Iterator => JIterator, List => JList, Map => JMap} import com.google.common.base.{Optional => GoogleOptional} import scala.jdk.CollectionConverters._ import scala.language.implicitConversions case object implicits { implicit def JList2Seq[A](a: JList[A]): Seq[A] = a.asScala.toSeq implicit def Seq2JList[A](a: Seq[A]): JList[A] = a.asJava implicit def JIte2Ite[A](a: JIterator[A]): Iterator[A] = a.asScala implicit def Ite2JIte[A](a: Iterator[A]): JIterator[A] = a.asJava implicit def OptionalJList2OptionSeq[A]( a: Optional[JList[A]] ): Option[Seq[A]] = a.map(JList2Seq(_)) implicit def OptionSeq2OptionalJList[A]( a: Option[Seq[A]] ): Optional[JList[A]] = a.map(Seq2JList) implicit def JMap2Map[K, V](a: JMap[K, V]): Map[K, V] = a.asScala.toMap implicit def Map2JMap[K, V](a: Map[K, V]): JMap[K, V] = a.asJava implicit def OptionalJMap2OptionMap[K, V]( a: Optional[JMap[K, V]] ): Option[Map[K, V]] = a.map(JMap2Map(_)) implicit def OptionMap2Optional2JMap[K, V]( a: Option[Map[K, V]] ): Optional[JMap[K, V]] = a.map(Map2JMap) implicit def Optional2Option[A](a: Optional[A]): Option[A] = if (a.isPresent) Some(a.get()) else None implicit def Option2Optional[A](a: Option[A]): Optional[A] = a match { case Some(v) => Optional.of(v) case None => Optional.empty() } implicit def GoogleOptional2Option[A](a: GoogleOptional[A]): Option[A] = Option(a.orNull()) implicit def Option2GoogleOptional[A](a: Option[A]): GoogleOptional[A] = a match { case Some(v) => GoogleOptional.of(v) case None => GoogleOptional.absent() } }
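A short sketch of round-tripping through the converters above:

import org.embulk.output.s3_parquet.implicits._
import java.util.{Optional, List => JList}

val o: Option[String] = Optional.of("a") // Optional2Option
val jl: JList[Int] = Seq(1, 2, 3)        // Seq2JList
val back: Seq[Int] = jl                  // JList2Seq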
Example 72
Source File: KafkaRDDFunctions.scala From incubator-s2graph with Apache License 2.0 | 5 votes |
package org.apache.spark.streaming.kafka import org.apache.spark.Logging import org.apache.spark.rdd.RDD import scala.language.implicitConversions import scala.reflect.ClassTag class KafkaRDDFunctions[T: ClassTag](self: RDD[T]) extends Logging with Serializable { def foreachPartitionWithOffsetRange(f: (OffsetRange, Iterator[T]) => Unit): Unit = { val offsets = self.asInstanceOf[HasOffsetRanges].offsetRanges foreachPartitionWithIndex { (i, part) => val osr: OffsetRange = offsets(i) f(osr, part) } } def foreachPartitionWithIndex(f: (Int, Iterator[T]) => Unit): Unit = { self.mapPartitionsWithIndex[Nothing] { (i, part) => f(i, part) Iterator.empty }.foreach { (_: Nothing) => () } } } object KafkaRDDFunctions { implicit def rddToKafkaRDDFunctions[T: ClassTag](rdd: RDD[T]): KafkaRDDFunctions[T] = { new KafkaRDDFunctions(rdd) } }
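A hedged sketch over a direct Kafka stream (creation of the stream value is elided; the offsets come from HasOffsetRanges exactly as in the code above):

import org.apache.spark.streaming.kafka.KafkaRDDFunctions._

stream.foreachRDD { rdd =>
  rdd.foreachPartitionWithOffsetRange { (osr, records) =>
    // osr is the OffsetRange of this partition
    println(s"${osr.topic}/${osr.partition}: ${osr.fromOffset}..${osr.untilOffset}")
    records.foreach(r => println(r))
  }
}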
Example 73
Source File: ReadDataMapperFactory.scala From spark-riak-connector with Apache License 2.0 | 5 votes |
package com.basho.riak.spark.rdd.mapper

import com.basho.riak.client.core.query.{Location, RiakObject}
import com.basho.riak.spark.rdd.BucketDef
import com.basho.riak.spark.util.DataMapper

import scala.language.implicitConversions
import scala.reflect.ClassTag

trait ReadDataMapper[T] extends DataMapper {
  def mapValue(location: Location, riakObject: RiakObject)(implicit ct: ClassTag[T]): T
}

trait ReadDataMapperFactory[T] extends Serializable {
  def dataMapper(bucketDef: BucketDef): ReadDataMapper[T]
  def targetClass: Class[T]
}

trait ReadDataMapperAsFactory[T] extends ReadDataMapperFactory[T] {
  this: ReadDataMapper[T] =>
  override def dataMapper(bucketDef: BucketDef): ReadDataMapper[T] = this
}

trait LowPriorityReadDataMapperFactoryImplicits {
  // scalastyle:off null
  trait IsNotSubclassOf[A, B]
  implicit def nsub[A, B]: A IsNotSubclassOf B = null
  implicit def nsubAmbiguity1[A, B >: A]: A IsNotSubclassOf B = null
  implicit def nsubAmbiguity2[A, B >: A]: A IsNotSubclassOf B = null
  // scalastyle:on null

  implicit def singleValueReaderFactory[T: ClassTag](implicit
    ev1: T IsNotSubclassOf (_, _),
    ev2: T IsNotSubclassOf (_, _, _)
  ): ReadDataMapperFactory[T] = ReadValueDataMapper.factory

  // K =:= String is required because only String keys are supported for now
  implicit def pairValueReaderFactory[K: ClassTag, V: ClassTag](implicit
    ev: K =:= String
  ): ReadDataMapperFactory[(K, V)] = ReadPairValueDataMapper.factory
}

object ReadDataMapperFactory extends LowPriorityReadDataMapperFactoryImplicits {

  // Any is used because the RDD type will be inferred from the RiakObject content type
  implicit object DefaultReadDataMapper
    extends ReadDataMapper[(String, Any)]
    with ReadDataMapperAsFactory[(String, Any)] {

    override def mapValue(location: Location, riakObject: RiakObject
                         )(implicit ct: ClassTag[(String, Any)]
                         ): (String, Any) =
      location.getKeyAsString -> ReadValueDataMapper.mapValue[Any](location, riakObject)

    override def targetClass: Class[(String, Any)] = classOf[(String, Any)]
  }
}
Example 74
import sbt._ import Keys._ import com.typesafe.sbt.pgp.PgpKeys._ import org.scalajs.sbtplugin.ScalaJSPlugin import sbtcrossproject.CrossProject import ScalaJSPlugin.autoImport._ import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ import sbtcrossproject.CrossPlugin.autoImport._ import scalajscrossproject.ScalaJSCrossPlugin.autoImport._ import scala.language.implicitConversions object Lib { type CPE = CrossProject => CrossProject type PE = Project => Project class ConfigureBoth(val jvm: PE, val js: PE) { def jvmConfigure(f: PE) = new ConfigureBoth(f compose jvm, js) def jsConfigure(f: PE) = new ConfigureBoth(jvm, f compose js) } def ConfigureBoth(both: PE) = new ConfigureBoth(both, both) implicit def _configureBothToCPE(p: ConfigureBoth): CPE = _.jvmConfigure(p.jvm).jsConfigure(p.js) def addCommandAliases(m: (String, String)*): PE = { val s = m.map(p => addCommandAlias(p._1, p._2)).reduce(_ ++ _) _.settings(s: _*) } implicit class CrossProjectExt(val cp: CrossProject) extends AnyVal { def bothConfigure(fs: PE*): CrossProject = fs.foldLeft(cp)((q, f) => q.jvmConfigure(f).jsConfigure(f)) } implicit def CrossProjectExtB(b: CrossProject.Builder) = new CrossProjectExt(b) def publicationSettings(ghProject: String) = ConfigureBoth( _.settings( publishTo := { val nexus = "https://oss.sonatype.org/" if (isSnapshot.value) Some("snapshots" at nexus + "content/repositories/snapshots") else Some("releases" at nexus + "service/local/staging/deploy/maven2") }, pomExtra := <scm> <connection>scm:git:github.com/japgolly/{ghProject}</connection> <developerConnection>scm:git:[email protected]:japgolly/{ghProject}.git</developerConnection> <url>github.com:japgolly/{ghProject}.git</url> </scm> <developers> <developer> <id>japgolly</id> <name>David Barri</name> </developer> </developers>)) .jsConfigure( sourceMapsToGithub(ghProject)) def sourceMapsToGithub(ghProject: String): PE = p => p.settings( scalacOptions ++= (if (isSnapshot.value) Seq.empty else Seq({ val a = p.base.toURI.toString.replaceFirst("[^/]+/?$", "") val g = s"https://raw.githubusercontent.com/japgolly/$ghProject" s"-P:scalajs:mapSourceURI:$a->$g/v${version.value}/" })) ) def preventPublication: PE = _.settings( publish := {}, publishLocal := {}, publishSigned := {}, publishLocalSigned := {}, publishArtifact := false, publishTo := Some(Resolver.file("Unused transient repository", target.value / "fakepublish")), packagedArtifacts := Map.empty) // .disablePlugins(plugins.IvyPlugin) }
Example 75
Source File: ValueImplicits.scala From parquet4s with MIT License | 5 votes |
package com.github.mjakubowski84.parquet4s import scala.language.implicitConversions object ValueImplicits extends AllValueCodecs { implicit val valueCodecConfiguration: ValueCodecConfiguration = ValueCodecConfiguration.default implicit def valueConversion[T](value: T)(implicit valueCodec: ValueCodec[T], configuration: ValueCodecConfiguration ): Value = valueCodec.encode(value, configuration) implicit def leftTupleConversion[A](tuple: (A, Value))(implicit valueACodec: ValueCodec[A], configuration: ValueCodecConfiguration ): (Value, Value) = (valueACodec.encode(tuple._1, configuration), tuple._2) implicit def rightTupleConversion[B](tuple: (Value, B))(implicit valueBCodec: ValueCodec[B], configuration: ValueCodecConfiguration ): (Value, Value) = (tuple._1, valueBCodec.encode(tuple._2, configuration)) implicit def tupleConversion[A, B](tuple: (A, B))(implicit valueACodec: ValueCodec[A], valueBCodec: ValueCodec[B], configuration: ValueCodecConfiguration ): (Value, Value) = (valueACodec.encode(tuple._1, configuration), valueBCodec.encode(tuple._2, configuration)) }
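A small sketch of the conversions above (the codecs come from AllValueCodecs, which ValueImplicits extends):

import com.github.mjakubowski84.parquet4s.ValueImplicits._

val cell: Value = 42                   // valueConversion with ValueCodec[Int]
val entry: (Value, Value) = ("id", 42) // tupleConversion encodes both sides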
Example 76
Source File: CollectionTransformer.scala From parquet4s with MIT License | 5 votes |
package com.github.mjakubowski84.parquet4s

import scala.language.{higherKinds, implicitConversions}
import scala.reflect.ClassTag

// The trait header below is reconstructed from its implementations; the
// excerpt clipped everything before the `to` method.
trait CollectionTransformer[Element, Col[_]] {
  def from(col: Col[Element]): List[Element]
  def to(list: List[Element]): Col[Element]
}

@deprecated(message = "This object is no longer in use", since = "1.2.0")
object CollectionTransformer {

  implicit def seqTransformer[E]: CollectionTransformer[E, Seq] = new CollectionTransformer[E, Seq] {
    override def from(col: Seq[E]): List[E] = col.toList
    override def to(list: List[E]): Seq[E] = list
  }

  implicit def listTransformer[E]: CollectionTransformer[E, List] = new CollectionTransformer[E, List] {
    override def from(col: List[E]): List[E] = col
    override def to(list: List[E]): List[E] = list
  }

  implicit def vectorTransformer[E]: CollectionTransformer[E, Vector] = new CollectionTransformer[E, Vector] {
    override def from(col: Vector[E]): List[E] = col.toList
    override def to(list: List[E]): Vector[E] = list.toVector
  }

  implicit def setTransformer[E]: CollectionTransformer[E, Set] = new CollectionTransformer[E, Set] {
    override def from(col: Set[E]): List[E] = col.toList
    override def to(list: List[E]): Set[E] = list.toSet
  }

  implicit def arrayTransformer[E : ClassTag]: CollectionTransformer[E, Array] = new CollectionTransformer[E, Array] {
    override def from(col: Array[E]): List[E] = col.toList
    override def to(list: List[E]): Array[E] = list.toArray
  }
}
Example 77
Source File: Deserializer.scala From almaren-framework with Apache License 2.0 | 5 votes |
package com.github.music.of.the.ainur.almaren.state.core import com.github.music.of.the.ainur.almaren.State import org.apache.spark.sql.DataFrame import org.apache.spark.sql.types.{DataType, StructType} import scala.language.implicitConversions import com.github.music.of.the.ainur.almaren.Almaren import com.github.music.of.the.ainur.almaren.util.Constants import org.apache.spark.sql.Dataset abstract class Deserializer() extends State { override def executor(df: DataFrame): DataFrame = deserializer(df) def deserializer(df: DataFrame): DataFrame implicit def string2Schema(schema: String): DataType = StructType.fromDDL(schema) } case class AvroDeserializer(columnName: String,schema: String) extends Deserializer { import org.apache.spark.sql.avro._ import org.apache.spark.sql.functions._ override def deserializer(df: DataFrame): DataFrame = { logger.info(s"columnName:{$columnName}, schema:{$schema}") df.withColumn(columnName,from_avro(col(columnName),schema)) .select("*",columnName.concat(".*")).drop(columnName) } } case class JsonDeserializer(columnName: String,schema: Option[String]) extends Deserializer { import org.apache.spark.sql.functions._ override def deserializer(df: DataFrame): DataFrame = { import df.sparkSession.implicits._ logger.info(s"columnName:{$columnName}, schema:{$schema}") df.withColumn(columnName, from_json(col(columnName), schema.getOrElse(getSchemaDDL(df.selectExpr(columnName).as[(String)])))) .select("*",columnName.concat(".*")) .drop(columnName) } private def getSchemaDDL(df: Dataset[String]): String = Almaren.spark.getOrCreate().read.json(df.sample(Constants.sampleDeserializer)).schema.toDDL } case class XMLDeserializer(columnName: String) extends Deserializer { import com.databricks.spark.xml.XmlReader override def deserializer(df: DataFrame): DataFrame = { logger.info(s"columnName:{$columnName}") new XmlReader().xmlRdd(df.sparkSession,df.select(columnName).rdd.map(r => r(0).asInstanceOf[String])).toDF } }
Example 78
Source File: Core.scala From almaren-framework with Apache License 2.0 | 5 votes |
package com.github.music.of.the.ainur.almaren.builder import com.github.music.of.the.ainur.almaren.builder.core.{Deserializer, Main, Source, Target} import com.github.music.of.the.ainur.almaren.{NullFork, State, Tree} import scala.language.implicitConversions import org.apache.spark.sql.DataFrame import com.github.music.of.the.ainur.almaren.Executor import com.github.music.of.the.ainur.almaren.NullCatalyst trait Core { val container: Option[Tree] import scala.language.implicitConversions implicit def state2Tree(state: State): Option[Tree] = container match { case Some(t) => t.copy(c= t.c :+ Tree(state)) case None => Tree(state) } def fork(containers: Option[Tree]*): Option[Tree] = { val cr = container.getOrElse(throw NullCatalyst) val tree = cr.c.last.copy(c = containers.flatMap(c => c).toList) cr.copy(c = cr.c.init :+ tree) } } object Core { implicit class Implicit(val container: Option[Tree]) extends Source with Main with Target with Deserializer with Executor { def batch: DataFrame = batch(container) } }
Example 79
Source File: _10_ImplicitConversion.scala From LearningScala with Apache License 2.0 | 5 votes |
package _050_implicit import scala.language.implicitConversions abstract class Currency case class Dollar(value: Int) extends Currency case class Yuan(value: Int) extends Currency case class Euro(value: Int) extends Currency object _10_ImplicitConversion { implicit val int2Dollar: Int => Dollar = (x: Int) => Dollar(x) // implicit def int2Dollar(x: Int): Dollar = Dollar(x) def addDollars(x: Dollar, y: Dollar) = Dollar(x.value + y.value) def main(args: Array[String]): Unit = { // normal use println(s"addDollars(Dollar(10), Dollar(20)): ${addDollars(Dollar(10), Dollar(20))}") // using conversion we can type: println(s"addDollars(10, 20): ${addDollars(10, 20)}") println // using an implicit defined in the package object: def numItems(list:List[String]): String = list.mkString(",") print("numItems(5 -> \"Wow\") ==> ") println(numItems(5 -> "Wow")) } }
Example 80
Source File: _07_WrappersUsingImplicits.scala From LearningScala with Apache License 2.0 | 5 votes |
package _050_implicit import scala.language.implicitConversions object _07_WrappersUsingImplicits { class IntWrapper(x: Int) { def isEven: Boolean = x % 2 == 0 def isOdd: Boolean = !isEven } // implicit def int2IntWrapper(x:Int):IntWrapper = new IntWrapper(x) implicit val int2IntWrapper: Int => IntWrapper = (x: Int) => new IntWrapper(x) def main(args: Array[String]): Unit = { println(s"10.isOdd? ${10.isOdd}") println(s"10.isEven? ${10.isEven}") println(s"(10 + 6).isEven? ${(10 + 6).isEven}") } }
Example 81
Source File: _12_ImplicitsReplaceViewBounds.scala From LearningScala with Apache License 2.0 | 5 votes |
package _050_implicit

object _12_ImplicitsReplaceViewBounds {

  import scala.language.implicitConversions

  implicit def str2Employee(s: String): MyEmployee = {
    val tokens = s.split(" ")
    new MyEmployee(tokens.head, tokens.last)
  }

  //noinspection DeprecatedViewBound
  def hireEmployee[A <% MyEmployee](a: A): String = {
    s"Hired this employee named ${a.firstName} ${a.lastName}"
  }

  // Another take, but preferred
  def hireEmployee2[A](a: A)(implicit ev: A => MyEmployee): String = {
    val employee: MyEmployee = ev(a)
    s"Hired this employee named ${employee.firstName} ${employee.lastName}"
  }

  def main(args: Array[String]): Unit = {
    println(hireEmployee("Mohammad Noor"))
    println(hireEmployee2("Mohammad Noor"))
  }
}

class MyEmployee(val firstName: String, val lastName: String)
Example 82
Source File: LineStyle.scala From suzaku with Apache License 2.0 | 5 votes |
package suzaku.ui.style import suzaku.ui.Keywords import scala.language.implicitConversions sealed trait LineStyle case object LineNone extends LineStyle case object LineHidden extends LineStyle case object LineSolid extends LineStyle case object LineDotted extends LineStyle case object LineDashed extends LineStyle case object LineInset extends LineStyle case object LineOutset extends LineStyle case object LineDouble extends LineStyle object LineStyle { import boopickle.Default._ implicit val lineStylePickler = compositePickler[LineStyle] .addConcreteType[LineNone.type] .addConcreteType[LineHidden.type] .addConcreteType[LineSolid.type] .addConcreteType[LineDotted.type] .addConcreteType[LineDashed.type] .addConcreteType[LineInset.type] .addConcreteType[LineOutset.type] .addConcreteType[LineDouble.type] } trait LineStyleImplicits { implicit def none2Style(a: Keywords.none.type): LineStyle = LineNone implicit def hidden2Style(a: Keywords.hidden.type): LineStyle = LineHidden implicit def solid2Style(a: Keywords.solid.type): LineStyle = LineSolid implicit def dotted2Style(a: Keywords.dotted.type): LineStyle = LineDotted implicit def dashed2Style(a: Keywords.dashed.type): LineStyle = LineDashed implicit def inset2Style(a: Keywords.inset.type): LineStyle = LineInset implicit def outset2Style(a: Keywords.outset.type): LineStyle = LineOutset }
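A brief sketch of the keyword conversions above (the mixing object is made up; some scope has to bring LineStyleImplicits in):

object styles extends suzaku.ui.style.LineStyleImplicits
import styles._
import suzaku.ui.Keywords

val border: suzaku.ui.style.LineStyle = Keywords.dotted // via dotted2Style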
Example 83
Source File: Alignment.scala From suzaku with Apache License 2.0 | 5 votes |
package suzaku.ui.layout import suzaku.ui.Keywords import scala.language.implicitConversions sealed trait Alignment case object AlignAuto extends Alignment case object AlignStart extends Alignment case object AlignEnd extends Alignment case object AlignCenter extends Alignment case object AlignBaseline extends Alignment case object AlignStretch extends Alignment trait AlignmentImplicits { implicit def auto2align(a: Keywords.auto.type): Alignment = AlignAuto implicit def start2align(a: Keywords.start.type): Alignment = AlignStart implicit def end2align(a: Keywords.end.type): Alignment = AlignEnd implicit def center2align(a: Keywords.center.type): Alignment = AlignCenter implicit def baseline2align(a: Keywords.baseline.type): Alignment = AlignBaseline implicit def stretch2align(a: Keywords.stretch.type): Alignment = AlignStretch } object Alignment { import boopickle.Default._ implicit val alignmentPickler = compositePickler[Alignment] .addConcreteType[AlignAuto.type] .addConcreteType[AlignStart.type] .addConcreteType[AlignEnd.type] .addConcreteType[AlignCenter.type] .addConcreteType[AlignBaseline.type] .addConcreteType[AlignStretch.type] }
Example 84
Source File: UUIDUtils.scala From seals with Apache License 2.0 | 5 votes |
package dev.tauri.seals package core import java.util.UUID import java.nio.charset.StandardCharsets import scala.language.implicitConversions import scodec.bits.ByteVector final object UUIDUtils { implicit final class UUIDSyntax(private val self: UUID) extends AnyVal { def / (sub: UUID): UUIDBuilder = UUIDBuilder(self) / sub def / (sub: ByteVector): UUIDBuilder = UUIDBuilder(self) / sub def / (sub: String): UUIDBuilder = UUIDBuilder(self) / sub } final case class UUIDBuilder(namespace: UUID, name: Vector[ByteVector] = Vector.empty) { def / (sub: UUID): UUIDBuilder = copy(name = name :+ NsUUID.bvFromUUID(sub)) def / (sub: ByteVector): UUIDBuilder = copy(name = name :+ sub) def / (sub: String): UUIDBuilder = copy(name = name :+ ByteVector.view(sub.getBytes(StandardCharsets.UTF_8))) def uuid: UUID = NsUUID.uuid5nestedBv(namespace, name: _*) } implicit def uuidLiteralSyntax(sc: StringContext): macros.UUIDSyntax = new macros.UUIDSyntax(sc) }
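A minimal sketch of the builder syntax above (the namespace UUID is arbitrary):

import dev.tauri.seals.core.UUIDUtils._
import java.util.UUID

val ns = UUID.randomUUID()
val derived: UUID = (ns / "users" / "alice").uuid // name-based UUID nested under ns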
Example 85
Source File: ImplicitConversions1.scala From Converter with GNU General Public License v3.0 | 5 votes |
package org.scalablytyped.converter.internal.ts.parser import scala.language.implicitConversions import scala.util.parsing.combinator.{ImplicitConversions, Parsers} trait ImplicitConversions1 extends ImplicitConversions { self: Parsers => @inline implicit final def flatten6[A, B, C, D, E, F, G]( g: (A, B, C, D, E, F) => G, ): (((((A ~ B) ~ C) ~ D) ~ E) ~ F) => G = { case a ~ b ~ c ~ d ~ e ~ f => g(a, b, c, d, e, f) } @inline implicit final def flatten7[A, B, C, D, E, F, G, H]( h: (A, B, C, D, E, F, G) => H, ): (((((A ~ B) ~ C) ~ D) ~ E) ~ F ~ G) => H = { case a ~ b ~ c ~ d ~ e ~ f ~ g => h(a, b, c, d, e, f, g) } @inline implicit final def flatten8[A, B, C, D, E, F, G, H, I]( i: (A, B, C, D, E, F, G, H) => I, ): A ~ B ~ C ~ D ~ E ~ F ~ G ~ H => I = { case a ~ b ~ c ~ d ~ e ~ f ~ g ~ h => i(a, b, c, d, e, f, g, h) } @inline implicit final def flatten9[A, B, C, D, E, F, G, H, I, J]( j: (A, B, C, D, E, F, G, H, I) => J, ): A ~ B ~ C ~ D ~ E ~ F ~ G ~ H ~ I => J = { case a ~ b ~ c ~ d ~ e ~ f ~ g ~ h ~ i => j(a, b, c, d, e, f, g, h, i) } }
Example 86
Source File: package.scala From ddd-on-scala with MIT License | 5 votes |
package crossroad0201.dddonscala

import crossroad0201.dddonscala.domain.{DomainError, EntityId}

import scala.util.{Failure, Success, Try}

package object application {

  import scala.language.implicitConversions

  type ErrorCode = String

  // NOTE: Syntax support for translating errors raised in the domain and
  // infrastructure layers inside application services.
  implicit class DomainErrorOps[E <: DomainError, R](domainResult: Either[E, R]) {
    def ifLeftThen(f: E => ServiceError): Either[ServiceError, R] = {
      domainResult match {
        case Left(e)  => Left(f(e))
        case Right(r) => Right(r)
      }
    }
  }

  // FIXME: Wouldn't Either[Throwable, _] be better than Try for the infrastructure layer as well?
  implicit class InfraErrorOps[S](infraResult: Try[S]) {
    def ifFailureThen(f: Throwable => ServiceError): Either[ServiceError, S] = {
      infraResult match {
        case Failure(e) => Left(f(e))
        case Success(s) => Right(s)
      }
    }
  }

  implicit class TryOptionOps[T](maybeValue: Try[Option[T]]) {
    def ifNotExists(f: => ServiceError)(): Either[ServiceError, T] = {
      maybeValue match {
        case Success(Some(s)) => Right(s)
        case Success(None)    => Left(f)
        case Failure(e)       => Left(SystemError(e))
      }
    }
  }

  def asServiceError[E](implicit f: E => ServiceError): E => ServiceError = f
}
Example 87
Source File: package.scala From ddd-on-scala with MIT License | 5 votes |
package crossroad0201.dddonscala.domain

import crossroad0201.dddonscala.domain.user.User

package object task {

  import scala.language.implicitConversions

  case class TaskId(value: String) extends AnyVal with EntityId

  object TaskId {
    def newId(implicit idGen: EntityIdGenerator): TaskId = TaskId(idGen.genId())
  }

  // The enclosing implicit class around createTask was clipped in this excerpt;
  // the wrapper below is reconstructed (its original name is not shown here) so
  // that the `user` reference and the trailing brace line up.
  implicit class Author(user: User) {
    def createTask(name: TaskName)(implicit idGen: EntityIdGenerator,
                                   metaDataCreator: EntityMetaDataCreator): DomainResult[Task, TaskCreated] = {
      val task = Task(
        id = TaskId.newId,
        name = name,
        authorId = user.id,
        metaData = metaDataCreator.create
      )
      val event = TaskCreated(
        taskId = task.id,
        name = task.name,
        authorId = task.authorId
      )
      DomainResult(task, event)
    }
  }

  implicit class Assignee(user: User) {
    def assignTo(task: Task): Either[TaskAlreadyClosed, DomainResult[Task, TaskAssigned]] = task.assign(user)
  }

  implicit class Commenter(user: User) {
    def commentTo(task: Task, message: CommentMessage): DomainResult[Task, TaskCommented] =
      task.addComment(Comment(message, user.id))
  }
}
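A hedged sketch of the enrichment syntax above (user: User and task: Task values assumed in scope):

val result = user assignTo task
// result: Either[TaskAlreadyClosed, DomainResult[Task, TaskAssigned]]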
Example 88
Source File: HeapSort.scala From functional-way with GNU General Public License v3.0 | 5 votes |
package heapsort import scala.language.implicitConversions private def swap(arr: Array[Int], i1: Int, i2: Int): Array[Int] = val elem1 = arr(i1) arr.updated(i1, arr(i2)).updated(i2, elem1) private def heapifyMax(arr: Array[Int], root: Int, heapSize: Int): Array[Int] = val rootIdx = root val lIdx = (root + 1) * 2 - 1 val rIdx = (root + 1) * 2 (arr.lift(rootIdx), arr.lift(lIdx), arr.lift(rIdx)) match case (Some(p), Some(l), Some(r)) if r > l && r > p && heapSize >= rIdx => heapifyMax(swap(arr, rIdx, rootIdx), rIdx, heapSize) case (Some(p), Some(l), _) if l > p && heapSize >= lIdx => heapifyMax(swap(arr, lIdx, rootIdx), lIdx, heapSize) case _ => arr private def buildMaxHeap(arr: Array[Int]): Array[Int] = (Math.floor(arr.length / 2).toInt - 1 to 0 by -1).foldLeft(arr)(heapifyMax(_, _, arr.length)) def heapSort(arr: Array[Int]): Array[Int] = (arr.length - 1 to 0 by -1).foldLeft(buildMaxHeap(arr))((b, a) => heapifyMax(swap(b, 0, a), 0, a - 1)) @main def heapSorter = println(heapifyMax(Array(1, 4, 5, 2, 4, 3, 5), 0, 7).mkString(",")) println(buildMaxHeap(Array(7, 4, 3, 0, 2, 1, 9, 5, 6)).mkString(",")) println(heapSort(Array(7, 4, 3, 0, 2, 1, 9, 5, 6)).mkString(",")) println(heapSort((1 to 1000).map(_ => scala.util.Random.nextInt()).toArray).mkString(","))
Example 89
Source File: FormSentence.scala From functional-way with GNU General Public License v3.0 | 5 votes |
package divideandconquer import util.Implicits._ import scala.language.implicitConversions def getMeaningfulSentence(input : String)(implicit dictionary : Set[String]) : Option[List[String]] = def isWord(word : String) : Boolean = dictionary.contains(word) def internalFunc(possibleSentence : String) : Option[List[String]] = possibleSentence.toList match case List() => None case a :: List() => if(isWord(a.toString)) Some(List(a.toString)) else None case other1 if isWord(other1.mkString("")) => Some(List(other1.mkString(""))) case other2 => (1 to other2.length).map(l =>{ if(isWord(other2.take(l))) internalFunc(other2.drop(l).mkString("")) match case Some(list) => Some(other2.take(l).mkString("")::list) case None => None else None }).reduce((a,b) => if(a.isEmpty && b.isEmpty) None else if(a.nonEmpty) a else b) internalFunc(input) @main def formSentence = val sentenceWithoutSpace = "iamlegend" val dictionary : Set[String] = Set("i", "am","legend","and","you") val meaningfulList : Option[List[String]]=getMeaningfulSentence(sentenceWithoutSpace)(dictionary) meaningfulList match case Some(list) => println(list.mkString(" ")) case None => println("A meaningful sentence cannot be formed.")
Example 90
Source File: MergeMapsProperties.scala From functional-way with GNU General Public License v3.0 | 5 votes |
import org.scalacheck._ import misc.mergeMaps import scala.language.implicitConversions class MergeMapsProperties extends Properties(name = "MergeMaps") { import Prop.BooleanOperators property("merge/int") = mergeProp[Int] property("merge/string") = mergeProp[String] // runs green, but 100 iterations take quite some time... // property("merge/map") = mergeProp[Map[String, Int]] private def mergeProp[T : Arbitrary : Cogen]: Prop = Prop.forAll { (ms: Seq[Map[String, T]], f: (T, T) => T) => ms.nonEmpty ==> { val allKeys = ms.map(_.keySet).reduce(_ ++ _) def expVal(k: String): T = ms.flatMap(_.get(k).toSeq).reduceLeft(f) val res = mergeMaps(f, ms:_*) res.keySet == allKeys && allKeys.forall(k => res(k) == expVal(k)) // Actually this is an alternative implementation of MergeMaps... :/ // val exp = allKeys.map(k => (k, expVal(k))).toMap // res == exp } } }
Example 91
Source File: BuildBinaryTreeTest.scala From functional-way with GNU General Public License v3.0 | 5 votes |
import org.junit.Test import org.junit.Assert._ import org.junit._ import types.Tree import types.Tree._ import misc._ import scala.language.implicitConversions class BuildBinaryTreeTest { @Test def testBuildBinaryTree() = { val input: Array[Char] = "ABCDEFGHIJ".toCharArray match case seq@Array(_*) => seq case somethingElse => throw new Exception(s"${somethingElse.getClass} returned") val tree: Tree[Char] = buildBinaryTree(input) def traverseAndAssert(tree: Tree[Char]): Seq[Char] = tree match case Node(_, left: Node[Char], v, right: Node[Char]) => assertTrue(Math.abs(left.height - right.height) <= 1) val visitedNodes1: Seq[Char] = traverseAndAssert(left) val visitedNodes2: Seq[Char] = traverseAndAssert(right) v +: (visitedNodes1 ++ visitedNodes2) case Node(_, left, v, right) => val visitedNodes1: Seq[Char] = traverseAndAssert(left) val visitedNodes2: Seq[Char] = traverseAndAssert(right) v +: (visitedNodes1 ++ visitedNodes2) case Leaf() => assertTrue(true) Seq() val visitedNodes: Seq[Char] = traverseAndAssert(tree) assertEquals(10, visitedNodes.size) visitedNodes.foreach(n => assertTrue(input.contains(n))) } }
Example 92
Source File: implicits.scala From spatial with MIT License | 5 votes |
package emul import scala.language.implicitConversions object implicits { implicit def fixedPointToInt(x: FixedPoint): Int = x.toInt implicit def intToFixedPoint(x: Int): FixedPoint = FixedPoint.fromInt(x) implicit def boolToBoolean(x: Bool): Boolean = x.value implicit def booleanToBool(x: Boolean): Bool = Bool(x) implicit class BoolArrayOps(x: Array[Bool]) { def toStr: String = "0b" + x.sliding(4,4).map{nibble => nibble.map{b => b.toStr}.reverse.mkString("") }.toList.reverse.mkString(",") def toFmtStr(fmt: FltFormat): String = { val sign = x.last val s = if (sign.value) "1" else "0" val exp = x.slice(fmt.sbits, fmt.sbits+fmt.ebits) val e = exp.toStr.drop(2) val sig = x.slice(0, fmt.sbits) val g = sig.toStr.drop(2) s"$s|$e|$g" } } implicit class StringArrayOps(x: String) { def toStr: String = x.reverse.sliding(4,4).map{nibble => nibble.reverse.mkString("") }.toList.reverse.mkString(",") } implicit class ByteArrayOps(x: Array[Byte]) { def toStr: String = "0b" + x.reverse.flatMap{byte => val big = List.tabulate(4){i => if ((byte & (1 << (i+4))) > 0) "1" else "0" }.reverse.mkString("") val ltl = List.tabulate(4){i => if ((byte & (1 << i)) > 0) "1" else "0" }.reverse.mkString("") List(big,ltl) }.mkString(",") } }
Example 93
package emul import scala.language.implicitConversions case class Ptr[T](var x: T) { def set(x2: T): Ptr[T] = { x = x2; this } def value: T = x private var initValue : T = _ private var needsInit: Boolean = true def initMem(init: T): Unit = if (needsInit) { x = init initValue = init needsInit = false } def reset() : Ptr[T] = { x = initValue; this } }
Example 94
Source File: FilePattern.scala From eel-sdk with Apache License 2.0 | 5 votes |
package io.eels import com.sksamuel.exts.Logging import io.eels.util.HdfsIterator import org.apache.hadoop.fs.{FileSystem, Path} import scala.language.implicitConversions object FilePattern { def apply(path: Path)(implicit fs: FileSystem): FilePattern = apply(path.toString()) def apply(path: java.nio.file.Path)(implicit fs: FileSystem): FilePattern = apply(path.toAbsolutePath().toString(), { _ => true }) implicit def stringToFilePattern(str: String)(implicit fs: FileSystem): FilePattern = FilePattern(str) } case class FilePattern(pattern: String, filter: org.apache.hadoop.fs.Path => Boolean = { _ => true }) extends Logging { def isRegex(): Boolean = pattern.contains("*") def isDirectory(): Boolean = pattern.endsWith("/") def toPaths()(implicit fs: FileSystem): List[Path] = { val paths = if (isRegex) { val regex = new Path(pattern).getName.replace("*", ".*?") val dir = new Path(pattern).getParent logger.debug(s"File expansion will check path $dir for files matching $regex") HdfsIterator.remote(fs.listFiles(dir, false)).toList .map(_.getPath) .filter { path => path.getName.matches(regex) } .filter(filter) } else if (fs.isDirectory(new Path(pattern))) { val path = new Path(pattern.stripSuffix("/")) logger.debug(s"File expansion will search directory $path") HdfsIterator.remote(fs.listFiles(path, false)).map(_.getPath).toList.filter(fs.isFile).filter(filter) } else { List(new Path(pattern)) } logger.debug(s"toPaths has returned ${paths.size} paths, first 5: ${paths.take(5).mkString(",")}") paths } def withFilter(p: Path => Boolean): FilePattern = copy(filter = p) }
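A short sketch of pattern expansion (a Hadoop FileSystem is assumed available; the data path is made up):

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import io.eels.FilePattern

implicit val fs: FileSystem = FileSystem.get(new Configuration())
val csvs = FilePattern("/data/*.csv").withFilter(_.getName.nonEmpty).toPaths()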
Example 95
Source File: syntax.scala From sbt-org-policies with Apache License 2.0 | 5 votes |
package sbtorgpolicies.runnable import sbt.TaskKey import scala.language.implicitConversions object syntax { implicit def runnableTaskOpsSyntax[T](taskKey: TaskKey[T]): RunnableTaskOps[T] = new RunnableTaskOps[T](taskKey) implicit def runnableSetSettingOpsSyntax[T](setSetting: SetSetting[T]): RunnableSetSettingOps[T] = new RunnableSetSettingOps[T](setSetting) implicit def runnableCommandOpsSyntax[T](command: String): RunnableCommandOps = new RunnableCommandOps(command) implicit def runnableCommandListOpsSyntax[T](commandList: List[String]): RunnableCommandListOps = new RunnableCommandListOps(commandList) final class RunnableTaskOps[T](taskKey: TaskKey[T]) { def asRunnableItemFull: RunnableItemConfigScope[T] = asRunnableItem(allModules = true, aggregated = true, crossScalaVersions = true) def asRunnableItem: RunnableItemConfigScope[T] = asRunnableItem(allModules = false, aggregated = false, crossScalaVersions = false) def asRunnableItem( allModules: Boolean, aggregated: Boolean, crossScalaVersions: Boolean ): RunnableItemConfigScope[T] = RunnableItemConfigScope(RunnableTask(taskKey), allModules, aggregated, crossScalaVersions) } final class RunnableSetSettingOps[T](setSetting: SetSetting[T]) { def asRunnableItemFull: RunnableItemConfigScope[T] = asRunnableItem(allModules = true, aggregated = true, crossScalaVersions = true) def asRunnableItem: RunnableItemConfigScope[T] = asRunnableItem(allModules = false, aggregated = false, crossScalaVersions = false) def asRunnableItem( allModules: Boolean, aggregated: Boolean, crossScalaVersions: Boolean ): RunnableItemConfigScope[T] = RunnableItemConfigScope( RunnableSetSetting(setSetting), allModules, aggregated, crossScalaVersions ) } final class RunnableCommandOps(command: String) { def asRunnableItemFull: RunnableItemConfigScope[Unit] = asRunnableItem(allModules = true, aggregated = true, crossScalaVersions = true) def asRunnableItem: RunnableItemConfigScope[Unit] = asRunnableItem(allModules = false, aggregated = false, crossScalaVersions = false) def asRunnableItem( allModules: Boolean, aggregated: Boolean, crossScalaVersions: Boolean ): RunnableItemConfigScope[Unit] = RunnableItemConfigScope(RunnableProcess(command), allModules, aggregated, crossScalaVersions) def asCmd: String = if (command.contains("/")) s";project ${command.replaceAll("/", ";")}" else s";$command" } final class RunnableCommandListOps(commandList: List[String]) { def asCmd: String = commandList.map(_.asCmd).mkString("") } }
Example 96
Source File: io.scala From sbt-org-policies with Apache License 2.0 | 5 votes |
package sbtorgpolicies import java.io._ import java.net.URL import java.nio.charset.Charset import java.nio.file.Path import java.nio.file.Paths.get import cats.syntax.either._ import sbtorgpolicies.exceptions.IOException import scala.io.Source import scala.language.implicitConversions package object io { type IOResult[T] = Either[IOException, T] object syntax { implicit def eitherFilterSyntax[T](either: Either[Throwable, T]): FilteredEitherOps[T] = new FilteredEitherOps(either) implicit def fileNameSyntax(fileName: String): FileNameOps = new FileNameOps(fileName) final class FilteredEitherOps[T](either: Either[Throwable, T]) { def withFilter(f: T => Boolean): Either[Throwable, T] = either match { case Right(r) if !f(r) => new IllegalStateException("Filter condition has not been satisfied").asLeft[T] case _ => either } } final class FileNameOps(filename: String) { def toPath: Path = get(filename) def toFile: File = new File(filename.fixPath) def fixPath: String = filename.replaceAll("/", File.separator) def ensureFinalSlash: String = filename + (if (filename.endsWith(File.separator)) "" else File.separator) } } object IO { def file(path: String): File = new File(path) def url(address: String): URL = new URL(address) def readLines(file: File): Iterator[String] = Source.fromFile(file).getLines() def readBytes(file: File): Array[Byte] = { val is: InputStream = new FileInputStream(file) val array: Array[Byte] = Stream.continually(is.read).takeWhile(_ != -1).map(_.toByte).toArray is.close() array } def write(file: File, content: String, charset: Charset = Charset.forName("UTF-8")): Unit = { val writer = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(file, false), charset) ) writer.write(content) writer.close() } def relativize(base: File, file: File): Option[String] = { def ensureEndingSlash: Option[String] = { val path = base.getAbsolutePath path.lastOption.map { case c if c == File.separatorChar => path case _ => path + File.separatorChar } } val baseFileString = if (base.isDirectory) ensureEndingSlash else None val pathString = file.getAbsolutePath baseFileString flatMap { case baseString if pathString.startsWith(baseString) => Some(pathString.substring(baseString.length)) case _ => None } } } }
Example 97
Source File: ApiError.scala From EncryCore with GNU General Public License v3.0 | 5 votes |
package encry.api.http import akka.http.scaladsl.model.{StatusCode, StatusCodes} import akka.http.scaladsl.server.{Directives, Route} import scala.language.implicitConversions case class ApiError(statusCode: StatusCode, reason: String = "") { def apply(detail: String): Route = complete(detail) def defaultRoute: Route = complete() def complete(detail: String = ""): Route = { val nonEmptyReason = if (reason.isEmpty) statusCode.reason else reason val body = if (detail.isEmpty) nonEmptyReason else s"$nonEmptyReason $detail" Directives.complete(statusCode.intValue() -> body) } } object ApiError { def apply(s: String): Route = InternalError(s) def apply(e: Throwable): Route = InternalError(e.getMessage) def apply(causes: Seq[Throwable]): Route = InternalError(mkString(causes)) def mkString(causes: Seq[Throwable]): String = causes.map(_.getMessage).mkString(", ") implicit def toRoute(error: ApiError): Route = error.defaultRoute object InternalError extends ApiError(StatusCodes.InternalServerError, "internal.error") object InvalidJson extends ApiError(StatusCodes.BadRequest, "invalid.json") object BadRequest extends ApiError(StatusCodes.BadRequest, "bad.request") object ApiKeyNotValid extends ApiError(StatusCodes.Forbidden, "invalid.api-key") object NotExists extends ApiError(StatusCodes.NotFound, "not-found") }
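A hedged sketch of the errors above inside a route (akka-http Directives assumed in scope; the path is made up):

import akka.http.scaladsl.server.Directives._
import encry.api.http.ApiError

val route = path("box" / Segment) { id =>
  if (id.isEmpty) ApiError.BadRequest("empty id") // completes 400 with "bad.request empty id"
  else complete(id)
}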
Example 98
Source File: ConfigSupport.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.common.config import java.util.Properties import java.util.concurrent.TimeUnit import cats.implicits._ import com.typesafe.config.{Config, ConfigFactory, ConfigObject} import scala.concurrent.duration.FiniteDuration import scala.language.implicitConversions trait ConfigSupport extends ConfigComponent { private val defaultConfig = ConfigFactory.load() val applicationName: String = defaultConfig.getString("application.name") val rootConfig: Config = defaultConfig val applicationConfig: Config = rootConfig.getConfig(applicationName) } object ConfigSupport { import scala.collection.JavaConverters._ implicit def toMap(cfg: ConfigObject): Map[String, Object] = { cfg.toConfig .entrySet() .asScala .map({ entry => entry.getKey -> entry.getValue.unwrapped() })( collection.breakOut ) } implicit def toMap(cfg: Config): Map[String, Object] = { cfg .entrySet() .asScala .map({ entry => entry.getKey -> entry.getValue.unwrapped() })( collection.breakOut ) } implicit def toProps(map: Map[String, AnyRef]): Properties = { map.foldLeft(new Properties) { case (a, (k, v)) => a.put(k, v) a } } implicit class ConfigImplicits(config: Config) { def getDurationOpt(path: String): Option[FiniteDuration] = getOptional(path, config.getDuration).map(d => FiniteDuration(d.toNanos, TimeUnit.NANOSECONDS)) def getStringOpt(path: String): Option[String] = getOptional(path, config.getString) def getConfigOpt(path: String): Option[Config] = getOptional(path, config.getConfig) def getIntOpt(path: String): Option[Int] = getOptional(path, config.getInt) def getBooleanOpt(path: String): Option[Boolean] = getOptional(path, config.getBoolean) def getStringListOpt(path: String): Option[List[String]] = getOptional(path, config.getStringList).map(_.asScala.toList) private def getOptional[A](path: String, method: String => A): Option[A] = { if (config.hasPath(path)) { method(path).some } else { none } } } }
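A small sketch of the optional getters above:

import com.typesafe.config.ConfigFactory
import hydra.common.config.ConfigSupport._

val cfg = ConfigFactory.parseString("timeout = 5s")
cfg.getDurationOpt("timeout") // Some(5 seconds)
cfg.getStringOpt("missing")   // None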
Example 99
Source File: VertexPartition.scala From graphx-algorithm with GNU General Public License v2.0 | 5 votes |
package org.apache.spark.graphx.impl

import scala.reflect.ClassTag

import org.apache.spark.util.collection.BitSet

import org.apache.spark.graphx._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap

private[graphx] object VertexPartition {

  private[graphx] class VertexPartition[VD: ClassTag](
      val index: VertexIdToIndexMap,
      val values: Array[VD],
      val mask: BitSet)
    extends VertexPartitionBase[VD]

  private[graphx] class VertexPartitionOps[VD: ClassTag](self: VertexPartition[VD])
    extends VertexPartitionBaseOps[VD, VertexPartition](self) {

    def withIndex(index: VertexIdToIndexMap): VertexPartition[VD] = {
      new VertexPartition(index, self.values, self.mask)
    }

    def withValues[VD2: ClassTag](values: Array[VD2]): VertexPartition[VD2] = {
      new VertexPartition(self.index, values, self.mask)
    }

    def withMask(mask: BitSet): VertexPartition[VD] = {
      new VertexPartition(self.index, self.values, mask)
    }
  }
}
Example 100
Source File: Writer.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.example.querydb

//#writer
import java.lang.{ Long => JLong }

import akka.actor.ActorRef

import com.datastax.driver.core._
import com.rbmhtechnology.eventuate.EventsourcedWriter

import scala.concurrent.Future

// The Writer class definition was clipped in this excerpt; only readSuccess
// survives. The header below is reconstructed: the constructor parameters and
// the remaining EventsourcedWriter members are elided, and the type parameters
// are an assumption based on readSuccess's signature.
class Writer /* (constructor parameters elided) */ extends EventsourcedWriter[Long, Unit] {

  override def readSuccess(result: Long): Option[Long] =
    Some(result + 1L)
}

object Writer {
  import java.util.concurrent.Executor

  import com.google.common.util.concurrent.ListenableFuture

  import scala.concurrent.{ ExecutionContext, Promise }
  import scala.language.implicitConversions
  import scala.util.Try

  implicit class ListenableFutureConverter[A](lf: ListenableFuture[A])(implicit executionContext: ExecutionContext) {

    def toFuture: Future[A] = {
      val promise = Promise[A]
      lf.addListener(new Runnable {
        def run() = promise.complete(Try(lf.get()))
      }, executionContext.asInstanceOf[Executor])
      promise.future
    }
  }
}
//#
Example 101
Source File: package.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.adapter import com.rbmhtechnology.eventuate.EventsourcedView import io.vertx.core.{ Future => VertxFuture, _ } import io.vertx.rxjava.core.{ Vertx => RxVertx } import rx.functions.Func1 import scala.concurrent.Promise import scala.util.{ Failure, Success } package object vertx { object VertxConverters { import scala.language.implicitConversions implicit def rxVertxToVertx(rxVertx: RxVertx): Vertx = rxVertx.getDelegate.asInstanceOf[Vertx] implicit def vertxToRxVertx(vertx: Vertx): RxVertx = new RxVertx(vertx) } object VertxHandlerConverters { implicit class Fn0AsHandler(fn: => Unit) { def asVertxHandler: Handler[Void] = new Handler[Void] { override def handle(event: Void): Unit = fn } } implicit class Fn1AsHandler[A](fn: A => Unit) { def asVertxHandler: Handler[A] = new Handler[A] { override def handle(event: A): Unit = fn(event) } } implicit class EventuateHandlerAsVertxHandler[A](h: EventsourcedView.Handler[A]) { def asVertxHandler: Handler[AsyncResult[A]] = new Handler[AsyncResult[A]] { override def handle(ar: AsyncResult[A]): Unit = { if (ar.succeeded()) { h(Success(ar.result())) } else { h(Failure(ar.cause())) } } } } implicit class HandlerAsEventuateHandler[A](h: Handler[AsyncResult[A]]) { def asEventuateHandler: EventsourcedView.Handler[A] = { case Success(res) => h.handle(VertxFuture.succeededFuture(res)) case Failure(err) => h.handle(VertxFuture.failedFuture(err)) } } implicit class PromiseAsVertxHandler[A](promise: Promise[A]) { def asVertxHandler: Handler[AsyncResult[A]] = new Handler[AsyncResult[A]] { override def handle(ar: AsyncResult[A]): Unit = { if (ar.succeeded()) { promise.success(ar.result()) } else { promise.failure(ar.cause()) } } } } } object RxConverters { implicit class Fn1AsRxFunc1[A, B](fn: A => B) { def asRx: Func1[A, B] = new Func1[A, B] { override def call(a: A): B = fn(a) } } } object VertxExtensions { implicit class RichMultiMap(map: MultiMap) { def getAsOption(name: String): Option[String] = Option(map.get(name)) def getOrElseThrow(name: String): String = if (map.contains(name)) { map.get(name) } else { throw new IllegalArgumentException(s"No entry for key '$name' found.") } } } }
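A brief sketch of the handler conversions above:

import com.rbmhtechnology.eventuate.adapter.vertx.VertxHandlerConverters._
import io.vertx.core.Handler

val h: Handler[String] = ((s: String) => println(s)).asVertxHandler // via Fn1AsHandler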
Example 102
Source File: EitherOps.scala From scalapb-circe with MIT License | 5 votes |
package scalapb_circe import scala.language.implicitConversions class EitherOps[A](val self: Either[Throwable, A]) extends AnyVal { def getOrError: A = self match { case Right(a) => a case Left(a) => throw a } } object EitherOps { implicit def toEitherOps[A](self: Either[Throwable, A]): EitherOps[A] = new EitherOps[A](self) }
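A minimal sketch of the extension syntax the implicit conversion enables once imported:

import scalapb_circe.EitherOps._

val parsed: Either[Throwable, Int] = Right(42)
parsed.getOrError                   // 42

val failed: Either[Throwable, Int] = Left(new RuntimeException("boom"))
// failed.getOrError would throw the RuntimeException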
Example 103
Source File: Options.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.shaded.jackson.module.scala.util import scala.language.implicitConversions trait OptionW[A] extends PimpedType[Option[A]] { def optMap[B](f: A => B): Option[B] = if (value.isEmpty) None else Option(f(value.get)) } object OptionW { def apply[A](a: => Option[A]): OptionW[A] = new OptionW[A] { lazy val value = a } def unapply[A](v: OptionW[A]): Option[Option[A]] = Some(v.value) } trait Options { implicit def mkOptionW[A](x: Option[A]): OptionW[A] = OptionW(x) implicit def unMkOptionW[A](x: OptionW[A]): Option[A] = x.value }
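A sketch of the syntax the Options trait enables once mixed in (the OptionSyntax object here is an assumption for illustration). Note the design choice: unlike map, optMap wraps the result in Option(...), so a function returning null yields None rather than Some(null):

import com.kakao.shaded.jackson.module.scala.util.Options

object OptionSyntax extends Options
import OptionSyntax._

Some("mango").optMap(_.toUpperCase)          // Some("MANGO")
(None: Option[String]).optMap(_.toUpperCase) // None
Some("x").optMap(_ => null: String)          // None, not Some(null)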
Example 104
Source File: Strings.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.shaded.jackson.module.scala.util import scala.language.implicitConversions trait StringW extends PimpedType[String] { def orIfEmpty(s2: => String) = if (value.isEmpty) s2 else value } object StringW { def apply(s: => String): StringW = new StringW { lazy val value = s } def unapply(s: StringW): Option[String] = Some(s.value) } trait Strings { implicit def mkStringW(x: => String): StringW = StringW(x) implicit def unMkStringW(x: StringW): String = x.value }
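A sketch of the resulting syntax (the StringSyntax object is an assumption for illustration). Both the receiver and the fallback are passed by name, so the fallback is only evaluated when the receiver is empty:

import com.kakao.shaded.jackson.module.scala.util.Strings

object StringSyntax extends Strings
import StringSyntax._

"".orIfEmpty("fallback")       // "fallback"
"value".orIfEmpty("fallback")  // "value"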
Example 105
Source File: Classes.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.shaded.jackson.module.scala.util

import scala.language.implicitConversions
import scala.reflect.{ScalaLongSignature, ScalaSignature}

trait ClassW extends PimpedType[Class[_]] {

  def hasSignature: Boolean = {
    def hasSigHelper(clazz: Class[_]): Boolean = {
      if (clazz == null) false
      else if (clazz.isAnnotationPresent(classOf[ScalaSignature]) ||
               clazz.isAnnotationPresent(classOf[ScalaLongSignature])) true
      // if the class does not have the signature, check its enclosing class (if present)
      else hasSigHelper(clazz.getEnclosingClass)
    }
    hasSigHelper(value)
  }
}

object ClassW {
  def apply(c: => Class[_]): ClassW = new ClassW {
    lazy val value = c
  }
  def unapply(c: ClassW): Option[Class[_]] = Some(c.value)
}

trait Classes {
  implicit def mkClassW(x: => Class[_]): ClassW = ClassW(x)
  implicit def unMkClassW[A](x: ClassW): Class[_] = x.value
}
Example 106
Source File: JsonConverters.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.mango.json import java.io.InputStream import com.kakao.shaded.jackson.core.{JsonParser, JsonToken} import com.kakao.shaded.jackson.databind.{DeserializationFeature, ObjectMapper, ObjectWriter} import com.kakao.shaded.jackson.module.afterburner.AfterburnerModule import com.kakao.shaded.jackson.module.scala.DefaultScalaModule import scala.language.implicitConversions import scala.reflect._ def streamJson(parser: JsonParser): JsonIterator = { val accessor = JsonTokenAccessor(parser) new Iterator[(JsonToken, JsonTokenAccessor)] { override def hasNext: Boolean = !parser.isClosed override def next(): (JsonToken, JsonTokenAccessor) = { val token = parser.nextToken() if (token == null) parser.close() (token, accessor) } } } }
Example 107
Source File: ConcurrentConverters.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.mango.concurrent import java.util.concurrent.{ConcurrentMap, TimeUnit, TimeoutException} import com.kakao.shaded.netty.util.{HashedWheelTimer, Timeout, TimerTask} import scala.collection.JavaConversions._ import scala.concurrent.duration._ import scala.concurrent.{Future, Promise} import scala.language.implicitConversions def timeout(duration: Duration): Future[Nothing] = { val promise = Promise[Nothing]() timer.newTimeout(new TimerTask { override def run(timeout: Timeout): Unit = { promise.failure(new TimeoutException(s"Operation was timed out after $duration")) } }, duration.toMillis, TimeUnit.MILLISECONDS) promise.future } implicit def toRichFuture[T](future: Future[T])(implicit timeout: Duration = 5.seconds): RichFuture[T] = new RichFuture[T](future, timeout) implicit def toEnsuring[K, V](map: ConcurrentMap[K, V]): EnsuringMap[K, V] = new EnsuringMap(map) implicit def toEnsuring[K, V](map: scala.collection.concurrent.Map[K, V]): EnsuringMap[K, V] = new EnsuringMap(map) }
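The Future[Nothing] return type of timeout is deliberate: Nothing is a subtype of every type, so the timeout future can be raced against any Future[T] without changing the inferred result type. A sketch of such a call site, assuming the surrounding object's timeout method is in scope (the snippet above elides its enclosing object):

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration._

val slow: Future[String] = Future { Thread.sleep(5000); "done" }

// completes with the string, or fails with TimeoutException after about one second
val guarded: Future[String] = Future.firstCompletedOf(Seq(slow, timeout(1.second)))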
Example 108
Source File: NamedExecutors.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.mango.concurrent import java.util.concurrent.Executors._ import java.util.concurrent.ForkJoinPool.ForkJoinWorkerThreadFactory import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.{ForkJoinWorkerThread, ExecutorService, ScheduledExecutorService, ForkJoinPool} import scala.language.implicitConversions object NamedExecutors { implicit def toRich(e: ExecutorService): RichExecutorService = new RichExecutorService(e) implicit def toRich(e: ScheduledExecutorService): RichScheduledExecutorService = new RichScheduledExecutorService(e) def scheduled(name: String, daemon: Boolean = true): RichScheduledExecutorService = { newSingleThreadScheduledExecutor(NamedThreadFactory(name, daemon)) } def scheduledPool(name: String, size: Int, daemon: Boolean = true): RichScheduledExecutorService = { newScheduledThreadPool(size, NamedThreadFactory(name, daemon)) } def cached(name: String, daemon: Boolean = true): RichExecutorService = { newCachedThreadPool(NamedThreadFactory(name, daemon)) } def fixed(name: String, size: Int, daemon: Boolean = true): RichExecutorService = { newFixedThreadPool(size, NamedThreadFactory(name, daemon)) } def single(name: String, daemon: Boolean = true): RichExecutorService = { newSingleThreadExecutor(NamedThreadFactory(name, daemon)) } def forkJoin(name: String, size: Int, daemon: Boolean = true, asyncMode: Boolean = false): RichExecutorService = { val counter = new AtomicInteger() new ForkJoinPool(size, new ForkJoinWorkerThreadFactory { override def newThread(pool: ForkJoinPool): ForkJoinWorkerThread = { val thread = new ForkJoinWorkerThread(pool) {} thread.setName(s"$name-${counter.incrementAndGet()}") thread.setDaemon(daemon) thread } }, null, asyncMode) } }
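A usage sketch, assuming RichExecutorService forwards the standard ExecutorService methods; the exact thread-name format ("name-N") comes from NamedThreadFactory, which is not shown here, so treat it as an assumption:

import com.kakao.mango.concurrent.NamedExecutors

val pool = NamedExecutors.fixed("worker", size = 4)   // 4 named daemon threads
pool.submit(new Runnable {
  def run(): Unit = println(Thread.currentThread().getName) // e.g. "worker-1"
})
pool.shutdown()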
Example 109
Source File: EnsuringMap.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.mango.concurrent import java.util.concurrent.ConcurrentMap import scala.language.implicitConversions def ensureEntry(key: K, default: => V): V = { var result = map.get(key) if (result == null) { val value = default result = map.putIfAbsent(key, value) if (result == null) { result = value } } result } }
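ensureEntry gives atomic get-or-create semantics: the by-name default is evaluated only on a miss, and if another thread wins the putIfAbsent race, its value is returned and ours is discarded. A sketch, assuming the implicit toEnsuring conversion from this package is in scope:

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong
import com.kakao.mango.concurrent._

val counters = new ConcurrentHashMap[String, AtomicLong]()
counters.ensureEntry("requests", new AtomicLong()).incrementAndGet()
counters.ensureEntry("requests", new AtomicLong()).incrementAndGet()
counters.get("requests").get() // 2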
Example 110
Source File: CustomedHBaseResources.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources.hbase import scala.language.implicitConversions import org.apache.hadoop.hbase.TableName import org.apache.hadoop.hbase.client._ case class CustomedRegionResource(relation: HBaseRelationTrait) extends ReferencedResource { // INLINE: SmartConnection is private[hbase], so we make a fake one here var connection: SmartConnection = _ var rl: RegionLocator = _ override def init(): Unit = { connection = HBaseConnectionCache.getConnection(relation.hbaseConf) rl = connection.getRegionLocator( TableName.valueOf(relation.catalog.namespace, relation.catalog.name)) } override def destroy(): Unit = { if (rl != null) { rl.close() rl = null } if (connection != null) { connection.close() connection = null } } val regions = releaseOnException { val keys = rl.getStartEndKeys keys.getFirst .zip(keys.getSecond) .zipWithIndex .map( x => CustomedHBaseRegion( x._2, Some(x._1._1), Some(x._1._2), Some(rl.getRegionLocation(x._1._1).getHostname))) } } case class CustomedTableResource(relation: HBaseRelationTrait) extends ReferencedResource { var connection: SmartConnection = _ var table: Table = _ override def init(): Unit = { connection = HBaseConnectionCache.getConnection(relation.hbaseConf) table = connection.getTable(TableName.valueOf(relation.catalog.namespace, relation.catalog.name)) } override def destroy(): Unit = { if (table != null) { table.close() table = null } if (connection != null) { connection.close() connection = null } } def get(list: java.util.List[org.apache.hadoop.hbase.client.Get]): CustomedGetResource = releaseOnException { CustomedGetResource(this, table.get(list)) } def getScanner(scan: Scan): CustomedScanResource = releaseOnException { CustomedScanResource(this, table.getScanner(scan)) } } case class CustomedScanResource(tbr: CustomedTableResource, rs: ResultScanner) extends Resource { def release() { rs.close() tbr.release() } } case class CustomedGetResource(tbr: CustomedTableResource, rs: Array[Result]) extends Resource { def release() { tbr.release() } }
Example 111
Source File: SqlContextAccessor.scala From HANAVora-Extensions with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources import org.apache.spark.sql.SQLContext import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import scala.language.implicitConversions object SqlContextAccessor { implicit def sqlContextToCatalogAccessable(sqlContext: SQLContext): SqlContextCatalogAccessor = new SqlContextCatalogAccessor(sqlContext) class SqlContextCatalogAccessor(sqlContext: SQLContext) extends SQLContext(sqlContext.sparkContext) { def registerRawPlan(lp: LogicalPlan, tableName: String): Unit = { sqlContext.catalog.registerTable(TableIdentifier(tableName), lp) } } }
Example 112
Source File: ValidatingPropertyMap.scala From HANAVora-Extensions with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.util import scala.language.implicitConversions class ValidatingPropertyMap[K, V](val m: Map[K, V]) { def getString(key: K): String = m.get(key) match { case Some(value: String) => value case Some(value) => value.toString case None => throw new RuntimeException(s"$key is mandatory") } def getBoolean(key: K, default: => Boolean): Boolean = m.get(key) match { case Some(value: Boolean) => value case Some(value) => value.toString.toBoolean case None => default } def getMandatoryBoolean(key: K): Boolean = m.get(key) match { case Some(value: Boolean) => value case Some(value) => value.toString.toBoolean case None => throw new RuntimeException(s"$key is mandatory") } def getString(key: K, default: => String): String = { m.get(key) match { case Some(value: String) => value case Some(value) => value.toString case None => default } } def getInt(key: K, default: => Int): Int = { m.get(key) match { case Some(value: String) => Integer.parseInt(value) case Some(value: Int) => value case None => default case _ => default } } def getSeq(key: K, default: => Seq[String]): Seq[String] = m.get(key) match { case Some(value: String) => value.split(",").map(_.trim).toSeq case Some(value) => value.toString.split(",").map(_.trim).toSeq case None => default } def getMandatorySeq(key: K): Seq[String] = this.getMandatory(key) match { case value: String => value.split(",").map(_.trim).toSeq case value => value.toString.split(",").map(_.trim).toSeq } def getMandatory(key: K): V = m.get(key) match { case Some(value) => value case None => throw new RuntimeException(s"$key is mandatory") } } object ValidatingPropertyMap { implicit def map2ValidatingPropertyMap[K, V](m: Map[K, V]): ValidatingPropertyMap[K, V] = new ValidatingPropertyMap[K, V](m) }
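With the companion's implicit conversion in scope, any Map picks up the validating accessors. A minimal sketch:

import org.apache.spark.sql.util.ValidatingPropertyMap._

val opts = Map("host" -> "localhost", "port" -> "1433", "paths" -> "a, b, c")
opts.getString("host")                 // "localhost"
opts.getInt("port", default = 0)       // 1433
opts.getSeq("paths", default = Nil)    // Seq("a", "b", "c")
opts.getMandatory("user")              // throws RuntimeException("user is mandatory")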
Example 113
Source File: TruckAndTrafficJoinBolt.scala From trucking-iot with Apache License 2.0 | 5 votes |
package com.orendainx.trucking.storm.bolts import java.util import com.orendainx.trucking.commons.models.{EnrichedTruckAndTrafficData, EnrichedTruckData, TrafficData} import com.typesafe.scalalogging.Logger import org.apache.storm.task.{OutputCollector, TopologyContext} import org.apache.storm.topology.OutputFieldsDeclarer import org.apache.storm.topology.base.BaseWindowedBolt import org.apache.storm.tuple.{Fields, Values} import org.apache.storm.windowing.TupleWindow import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer import scala.collection.{Map, mutable} import scala.language.implicitConversions private def processAndEmitData(truckDataPerRoute: Map[Int, ListBuffer[EnrichedTruckData]], trafficDataPerRoute: Map[Int, ListBuffer[TrafficData]]) { // For each EnrichedTruckData object, find the TrafficData object with the closest timestamp truckDataPerRoute.foreach { case (routeId, truckDataList) => trafficDataPerRoute.get(routeId) match { case None => // No traffic data for this routeId, so drop/ignore truck data case Some(trafficDataList) => truckDataList foreach { truckData => trafficDataList.sortBy(data => math.abs(data.eventTime - truckData.eventTime)).headOption match { case None => // Window didn't capture any traffic data for this truck's route case Some(trafficData) => val joinedData = EnrichedTruckAndTrafficData(truckData.eventTime, truckData.truckId, truckData.driverId, truckData.driverName, truckData.routeId, truckData.routeName, truckData.latitude, truckData.longitude, truckData.speed, truckData.eventType, truckData.foggy, truckData.rainy, truckData.windy, trafficData.congestionLevel) outputCollector.emit(new Values("EnrichedTruckAndTrafficData", joinedData)) } } } } } override def declareOutputFields(declarer: OutputFieldsDeclarer): Unit = declarer.declare(new Fields("dataType", "data")) }
Example 114
Source File: VectorRDDFunctions.scala From spark-vl-bfgs with Apache License 2.0 | 5 votes |
package org.apache.spark.ml.optim import scala.language.implicitConversions import org.apache.spark.HashPartitioner import org.apache.spark.rdd.RDD import org.apache.spark.mllib.linalg._ class VectorRDDFunctions(self: RDD[Vector]) { def treeSum(depth: Int = 2): RDD[Vector] = { val zeroValue: Vector = null val seqOp = (s: Vector, v: Vector) => { if (s != null) { BLAS.axpy(1.0, v, s) s } else { v.copy.toDense } } val combOp = (s1: Vector, s2: Vector) => { // TODO: handle empty partitions BLAS.axpy(1.0, s2, s1) s1 } require(depth >= 1, s"Depth must be greater than or equal to 1 but got $depth.") val aggregatePartition = (it: Iterator[Vector]) => it.aggregate(zeroValue)(seqOp, combOp) var partiallyAggregated = self.mapPartitions(it => Iterator(aggregatePartition(it))) var numPartitions = partiallyAggregated.partitions.length val scale = math.max(math.pow(numPartitions, 1.0 / depth), 2.0) while (numPartitions > 1) { numPartitions = math.ceil(numPartitions / scale).toInt val curNumPartitions = numPartitions partiallyAggregated = partiallyAggregated.mapPartitionsWithIndex { (i, iter) => iter.map((i % curNumPartitions, _)) }.reduceByKey(new HashPartitioner(curNumPartitions), combOp) .values } require(partiallyAggregated.partitions.length == 1) partiallyAggregated } } object VectorRDDFunctions { implicit def fromVectorRDD(rdd: RDD[Vector]): VectorRDDFunctions = new VectorRDDFunctions(rdd) }
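treeSum aggregates each partition locally and then reduces in roughly log-many rounds, so no single task ever merges more than about `scale` partial sums. A usage sketch, assuming an active SparkContext sc:

import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.ml.optim.VectorRDDFunctions._

val vecs = sc.parallelize(Seq(
  Vectors.dense(1.0, 2.0),
  Vectors.dense(3.0, 4.0),
  Vectors.dense(5.0, 6.0)), numSlices = 3)

val total = vecs.treeSum().first() // [9.0, 12.0]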
Example 115
Source File: VLBFGS1.scala From spark-vl-bfgs with Apache License 2.0 | 5 votes |
package org.apache.spark.ml.optim import java.util.Random import scala.language.implicitConversions import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.ml.optim.VectorFreeLBFGS.{Oracle, VectorSpace} import org.apache.spark.ml.optim.VectorRDDFunctions._ import org.apache.spark.mllib.linalg.{BLAS, Vector, Vectors} import org.apache.spark.mllib.random.RandomRDDs import org.apache.spark.mllib.regression.LabeledPoint import org.apache.spark.rdd.{RDD, UnionRDD} import org.apache.spark.storage.StorageLevel private def gradient(data: RDD[Array[LabeledPoint]], dx: RDD[Vector]): RDD[Vector] = { data.cartesian(dx).map { case (points, x) => val g = Vectors.zeros(x.size) points.foreach { case LabeledPoint(b, a) => val err = BLAS.dot(a, x) - b BLAS.axpy(err, a, g) } g }.treeSum() } def main(args: Array[String]): Unit = { val conf = new SparkConf().setAppName("VLBFGS").setMaster("local[*]") val sc = new SparkContext(conf) sc.setCheckpointDir("/tmp/checkpoint") val n = 1000 val p = 100 val random = new Random(0L) val xExact = Vectors.dense(Array.fill(p)(random.nextDouble())) val data = RandomRDDs.normalVectorRDD(sc, n, p, 4, 11L).mapPartitionsWithIndex { (idx, part) => val random = new Random(100 + idx) part.map { v => val target = BLAS.dot(v, xExact) + 0.1 * random.nextGaussian() LabeledPoint(target, v) } }.glom() .cache() val x = solve(data).first() println(s"x_exact = $xExact") println(s"x_vlbfgs = $x") sc.stop() } }
Example 116
Source File: RichJsValue.scala From sbt-coursera with BSD 3-Clause "New" or "Revised" License | 5 votes |
package ch.epfl.lamp import scala.language.implicitConversions import spray.json._ class RichJsValue(js: JsValue) { def \(name: String): JsValue = js match { case JsObject(fields) => fields(name) case _ => throw new IllegalArgumentException("Cannot select field " + name + " from non-JsObject " + js) } def hasFieldNamed(name: String) = js match { case JsObject(fields) => fields.contains(name) case _ => false } def arrayValues: List[JsValue] = js match { case JsArray(values) => values.toList case _ => throw new IllegalArgumentException("Trying to select values from non-JsArray" + js) } } object RichJsValue { implicit def enrichJsValue(js: JsValue) = new RichJsValue(js) }
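A minimal sketch of the XPath-like selection syntax this enables on spray-json values:

import spray.json._
import ch.epfl.lamp.RichJsValue.enrichJsValue

val js = """{"name": "alice", "tags": ["a", "b"]}""".parseJson
js \ "name"                // JsString("alice")
(js \ "tags").arrayValues  // List(JsString("a"), JsString("b"))
js.hasFieldNamed("age")    // false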
Example 117
Source File: package.scala From JustinDB with Apache License 2.0 | 5 votes |
package justin.db import scala.language.implicitConversions package object vectorclocks { implicit class VectorClockOps(plain: String) { def toVectorClock[Id](implicit string2Id: String => Id): VectorClock[Id] = VectorClock.apply { plain.split(",").map { s => val Array(key, value) = s.trim.split(":") (string2Id(key), Counter(value.toInt)) }.toMap } } object VectorClockOps { implicit def stringAsId(s: String): VectorClock[String] = s.toVectorClock[String] implicit def intAsId(s: String): VectorClock[Int] = s.toVectorClock[Int](_.toInt) } }
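A sketch of the syntax this enables, building a clock from its compact text form; ids stay plain strings via the identity conversion, or become ints via intAsId:

import justin.db.vectorclocks.VectorClockOps

val textual = "A:1, B:2".toVectorClock[String]
// VectorClock(Map("A" -> Counter(1), "B" -> Counter(2)))

import justin.db.vectorclocks.VectorClockOps._
val numeric: VectorClock[Int] = "1:1, 2:2"   // via intAsId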
Example 118
Source File: Data.scala From JustinDB with Apache License 2.0 | 5 votes |
package justin.db import java.util.UUID import justin.db.consistenthashing.NodeId import justin.db.replica.PreferenceList import justin.db.storage.JustinData import justin.db.vectorclocks.VectorClock import justin.db.versioning.NodeIdVectorClockBase64 import scala.language.implicitConversions case class Data(id: UUID, value: String, vclock: VectorClock[NodeId] = VectorClock(), timestamp: Long = System.currentTimeMillis()) object Data { def updateVclock(data: Data, preferenceList: PreferenceList): Data = { val nodeIds = preferenceList.all data.copy(vclock = nodeIds.foldLeft(data.vclock)(_ increase _)) } implicit def toInternal(data: Data): JustinData = { val encodedVClock = new NodeIdVectorClockBase64().encode(data.vclock).get // TODO: check if encoding of VClock is possible (make it typesafe) JustinData(data.id, data.value, encodedVClock, data.timestamp) } implicit def fromInternal(justinData: JustinData): Data = { val decodedVClock = new NodeIdVectorClockBase64().decode(justinData.vclock).get // TODO: check if decoding of VClock is possible (make it typesafe) Data(justinData.id, justinData.value, decodedVClock, justinData.timestamp) } }
Example 119
Source File: JsonImplicits.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.ops.v4 import play.api.libs.json._ import scala.language.implicitConversions private[ops] class JsonImplicits private[ops] extends ImplicitTupleFormats with ImplicitEmptyIterableReads { implicit def jsValueOps(json: JsValue): JsValueOps = new JsValueOps(json) implicit def formatOps(format: Format.type): FormatOps.type = FormatOps implicit def oformatOps(oformat: OFormat.type): OFormatOps.type = OFormatOps implicit def abstractJsonOps(json: Json.type): AbstractJsonOps.type = AbstractJsonOps implicit def abstractJsonOps(json: TypeKeyExtractor.type): AbstractJsonOps.type = AbstractJsonOps implicit def readsMap[K: ReadsKey, V: Reads]: Reads[Map[K, V]] = { val readsK = ReadsKey.of[K] val stringKeyReader = Reads.map[V] stringKeyReader.flatMap { a => Reads[Map[K, V]] { _ => val initResult: JsResult[Map[K, V]] = JsSuccess(Map()) a.map { case (k, v) => (readsK.read(k), v) }.foldLeft(initResult) { case (JsSuccess(acc, _), (JsSuccess(k, _), v)) => JsSuccess(acc.updated(k, v)) case (JsSuccess(_, _), (firstError: JsError, _)) => firstError case (accErrors: JsError, (errors: JsError, _)) => accErrors ++ errors case (accErrors: JsError, _) => accErrors } } } } implicit def writesMap[K: WritesKey, V: Writes]: Writes[Map[K, V]] = { val writesK = WritesKey.of[K] val stringKeyWriter = Writes.map[V] Writes[Map[K, V]](values => stringKeyWriter.writes(values.map { case (k, v) => (writesK.write(k), v) })) } }
Example 120
Source File: Depth.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import scala.language.implicitConversions class Depth private[Depth] (val depth: Int) extends AnyVal with Counted with Proxy { override def self = depth override protected def throwOnNegative(): Nothing = throw new IllegalArgumentException("Depth cannot be negative") @inline override def count: Int = depth def -(that: Depth) = Depth(this.depth - that.depth) def +(that: Depth) = new Depth(this.depth + that.depth) // no need to validate } object Depth extends (Int => Depth) { implicit def fromInt(int: Int): Depth = Depth(int) implicit def toInt(depth: Depth): Int = depth.depth override def apply(depth: Int): Depth = { val d = new Depth(depth) d.validate() d } }
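The asymmetry between - and + is deliberate: subtraction can go negative, so it re-validates through apply, while the sum of two non-negative depths cannot. A sketch of the implicit Int syntax:

import play.api.libs.json.scalacheck.Depth

def shrink(d: Depth): Depth = d - 1   // fromInt lifts the literal; the result is re-validated
val max: Int = Depth(5)               // toInt converts in the other direction
// Depth(-1) throws IllegalArgumentException("Depth cannot be negative")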
Example 121
Source File: DurationGenerators.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import org.scalacheck.{Arbitrary, Gen} import scala.concurrent.duration._ import scala.concurrent.duration.ops.v4._ import scala.language.implicitConversions trait DurationGenerators { implicit val arbTimeUnit: Arbitrary[TimeUnit] = Arbitrary { Gen.oneOf( DAYS, HOURS, MICROSECONDS, MILLISECONDS, MINUTES, NANOSECONDS, SECONDS ) } private val minNanos = Long.MinValue + 1 private val maxNanos = Long.MaxValue implicit def arbFiniteDuration(implicit timeUnitArb: Arbitrary[TimeUnit]): Arbitrary[FiniteDuration] = Arbitrary { for { length <- Gen.chooseNum(minNanos, maxNanos) unit <- timeUnitArb.arbitrary } yield Duration(length, NANOSECONDS).toUnitPrecise(unit) } implicit def arbDuration(implicit timeUnitArb: Arbitrary[TimeUnit]): Arbitrary[Duration] = Arbitrary { for { length <- Gen.oneOf( Gen.chooseNum(minNanos, maxNanos), Gen.oneOf(Double.NegativeInfinity, Double.MinPositiveValue, Double.PositiveInfinity, Double.NaN) ) unit <- timeUnitArb.arbitrary } yield length match { case nanos: Long => Duration(nanos, NANOSECONDS).toUnitPrecise(unit) case inf: Double => Duration(inf, NANOSECONDS) } } } object DurationGenerators extends DurationGenerators
Example 122
Source File: Width.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import scala.language.implicitConversions class Width private[Width] (val width: Int) extends AnyVal with Counted with Proxy { override def self = width override protected def throwOnNegative(): Nothing = throw new IllegalArgumentException("Width cannot be negative") @inline override def count: Int = width def -(that: Width) = Width(this.width - that.width) def +(that: Width) = new Width(this.width + that.width) // no need to validate } object Width extends (Int => Width) { implicit def fromInt(int: Int): Width = Width(int) implicit def toInt(width: Width): Int = width.width override def apply(width: Int): Width = { val w = new Width(width) w.validate() w } }
Example 123
Source File: Depth.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import scala.language.implicitConversions class Depth private[Depth] (val depth: Int) extends AnyVal with Counted with Proxy { override def self = depth override protected def throwOnNegative(): Nothing = throw new IllegalArgumentException("Depth cannot be negative") @inline override def count: Int = depth def -(that: Depth) = Depth(this.depth - that.depth) def +(that: Depth) = new Depth(this.depth + that.depth) // no need to validate } object Depth extends (Int => Depth) { implicit def fromInt(int: Int): Depth = Depth(int) implicit def toInt(depth: Depth): Int = depth.depth override def apply(depth: Int): Depth = { val d = new Depth(depth) d.validate() d } }
Example 124
Source File: DurationGenerators.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import org.scalacheck.{Arbitrary, Gen} import scala.concurrent.duration._ import scala.concurrent.duration.ops.v4._ import scala.language.implicitConversions trait DurationGenerators { implicit val arbTimeUnit: Arbitrary[TimeUnit] = Arbitrary { Gen.oneOf( DAYS, HOURS, MICROSECONDS, MILLISECONDS, MINUTES, NANOSECONDS, SECONDS ) } private val minNanos = Long.MinValue + 1 private val maxNanos = Long.MaxValue implicit def arbFiniteDuration(implicit timeUnitArb: Arbitrary[TimeUnit]): Arbitrary[FiniteDuration] = Arbitrary { for { length <- Gen.chooseNum(minNanos, maxNanos) unit <- timeUnitArb.arbitrary } yield Duration(length, NANOSECONDS).toUnitPrecise(unit) } implicit def arbDuration(implicit timeUnitArb: Arbitrary[TimeUnit]): Arbitrary[Duration] = Arbitrary { for { length <- Gen.oneOf( Gen.chooseNum(minNanos, maxNanos), Gen.oneOf(Double.NegativeInfinity, Double.MinPositiveValue, Double.PositiveInfinity, Double.NaN) ) unit <- timeUnitArb.arbitrary } yield length match { case nanos: Long => Duration(nanos, NANOSECONDS).toUnitPrecise(unit) case inf: Double => Duration(inf, NANOSECONDS) } } } object DurationGenerators extends DurationGenerators
Example 125
Source File: Width.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import scala.language.implicitConversions class Width private[Width] (val width: Int) extends AnyVal with Counted with Proxy { override def self = width override protected def throwOnNegative(): Nothing = throw new IllegalArgumentException("Width cannot be negative") @inline override def count: Int = width def -(that: Width) = Width(this.width - that.width) def +(that: Width) = new Width(this.width + that.width) // no need to validate } object Width extends (Int => Width) { implicit def fromInt(int: Int): Width = Width(int) implicit def toInt(width: Width): Int = width.width override def apply(width: Int): Width = { val w = new Width(width) w.validate() w } }
Example 126
Source File: Depth.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import scala.language.implicitConversions class Depth private[Depth] (val depth: Int) extends AnyVal with Counted { override protected def throwOnNegative(): Nothing = throw new IllegalArgumentException("Depth cannot be negative") @inline override def count: Int = depth def -(that: Depth) = Depth(this.depth - that.depth) def +(that: Depth) = new Depth(this.depth + that.depth) // no need to validate } object Depth extends (Int => Depth) { implicit def fromInt(int: Int): Depth = Depth(int) implicit def toInt(depth: Depth): Int = depth.depth override def apply(depth: Int): Depth = { val d = new Depth(depth) d.validate() d } }
Example 127
Source File: DurationGenerators.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import org.scalacheck.{Arbitrary, Gen} import scala.concurrent.duration._ import scala.concurrent.duration.ops.v4._ import scala.language.implicitConversions trait DurationGenerators { implicit val arbTimeUnit: Arbitrary[TimeUnit] = Arbitrary { Gen.oneOf( DAYS, HOURS, MICROSECONDS, MILLISECONDS, MINUTES, NANOSECONDS, SECONDS ) } private val minNanos = Long.MinValue + 1 private val maxNanos = Long.MaxValue implicit def arbFiniteDuration(implicit timeUnitArb: Arbitrary[TimeUnit]): Arbitrary[FiniteDuration] = Arbitrary { for { length <- Gen.chooseNum(minNanos, maxNanos) unit <- timeUnitArb.arbitrary } yield Duration(length, NANOSECONDS).toUnitPrecise(unit) } implicit def arbDuration(implicit timeUnitArb: Arbitrary[TimeUnit]): Arbitrary[Duration] = Arbitrary { for { length <- Gen.oneOf( Gen.chooseNum(minNanos, maxNanos), Gen.oneOf(Double.NegativeInfinity, Double.MinPositiveValue, Double.PositiveInfinity, Double.NaN) ) unit <- timeUnitArb.arbitrary } yield length match { case nanos: Long => Duration(nanos, NANOSECONDS).toUnitPrecise(unit) case inf: Double => Duration(inf, NANOSECONDS) } } } object DurationGenerators extends DurationGenerators
Example 128
Source File: Width.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.scalacheck import scala.language.implicitConversions class Width private[Width] (val width: Int) extends AnyVal with Counted { override protected def throwOnNegative(): Nothing = throw new IllegalArgumentException("Width cannot be negative") @inline override def count: Int = width def -(that: Width) = Width(this.width - that.width) def +(that: Width) = new Width(this.width + that.width) // no need to validate } object Width extends (Int => Width) { implicit def fromInt(int: Int): Width = Width(int) implicit def toInt(width: Width): Int = width.width override def apply(width: Int): Width = { val w = new Width(width) w.validate() w } }
Example 129
Source File: JsonImplicits.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.ops.v4 import play.api.libs.json._ import scala.language.implicitConversions private[ops] class JsonImplicits private[ops] extends ImplicitTupleFormats with ImplicitEmptyIterableReads { implicit def jsValueOps(json: JsValue): JsValueOps = new JsValueOps(json) implicit def formatOps(format: Format.type): FormatOps.type = FormatOps implicit def oformatOps(oformat: OFormat.type): OFormatOps.type = OFormatOps implicit def abstractJsonOps(json: Json.type): AbstractJsonOps.type = AbstractJsonOps implicit def abstractJsonOps(json: TypeKeyExtractor.type): AbstractJsonOps.type = AbstractJsonOps implicit def readsMap[K: ReadsKey, V: Reads]: Reads[Map[K, V]] = { val readsK = ReadsKey.of[K] val stringKeyReader = Reads.map[V] stringKeyReader.flatMap { a => Reads[Map[K, V]] { _ => val initResult: JsResult[Map[K, V]] = JsSuccess(Map()) a.map { case (k, v) => (readsK.read(k), v) }.foldLeft(initResult) { case (JsSuccess(acc, _), (JsSuccess(k, _), v)) => JsSuccess(acc + (k -> v)) case (JsSuccess(_, _), (firstError: JsError, _)) => firstError case (accErrors: JsError, (errors: JsError, _)) => accErrors ++ errors case (accErrors: JsError, _) => accErrors } } } } implicit def writesMap[K: WritesKey, V: Writes]: Writes[Map[K, V]] = { val writesK = WritesKey.of[K] val stringKeyWriter = Writes.map[V] Writes[Map[K, V]](values => stringKeyWriter.writes(values.map { case (k, v) => (writesK.write(k), v) })) } }
Example 130
Source File: Change.scala From levsha with Apache License 2.0 | 5 votes |
package levsha import levsha.impl.DiffRenderContext.ChangesPerformer import scala.collection.mutable import scala.language.implicitConversions sealed trait Change { def id: List[Int] } object Change { implicit def parseId(s: String): List[Int] = s.split('_').toList.map(_.toInt) final class DiffTestChangesPerformer extends ChangesPerformer { private val buffer = mutable.Buffer.empty[Change] def removeAttr(id: Id, xmlNs: String, name: String): Unit = buffer += Change.removeAttr(id.toList.map(_.toInt), xmlNs: String, name) def removeStyle(id: Id, name: String): Unit = buffer += Change.removeStyle(id.toList.map(_.toInt), name) def remove(id: Id): Unit = buffer += Change.remove(id.toList.map(_.toInt)) def setAttr(id: Id, xmlNs: String, name: String, value: String): Unit = buffer += Change.setAttr(id.toList.map(_.toInt), name, xmlNs, value) def setStyle(id: Id, name: String, value: String): Unit = buffer += Change.setStyle(id.toList.map(_.toInt), name, value) def createText(id: Id, text: String): Unit = buffer += Change.createText(id.toList.map(_.toInt), text) def create(id: Id, xmlNs: String, tag: String): Unit = buffer += Change.create(id.toList.map(_.toInt), tag, xmlNs) def result: Seq[Change] = buffer.toVector } case class removeAttr(id: List[Int], xmlNs: String, name: String) extends Change case class removeStyle(id: List[Int], name: String) extends Change case class remove(id: List[Int]) extends Change case class setAttr(id: List[Int], name: String, xmlNs: String, value: String) extends Change case class setStyle(id: List[Int], name: String, value: String) extends Change case class createText(id: List[Int], text: String) extends Change case class create(id: List[Int], tag: String, xmlNs: String) extends Change implicit val ordering = new Ordering[Change] { import Ordering.Implicits._ private val underlying = implicitly[Ordering[List[Int]]] def compare(x: Change, y: Change): Int = { underlying.compare(x.id, y.id) } } }
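The parseId conversion is what lets test code write element ids as compact strings. A sketch of building and ordering changes; the implicit ordering compares ids lexicographically as List[Int], so "1_10" sorts after "1_2":

import levsha.Change._

val changes = Seq(
  setStyle("1_2", name = "color", value = "red"),
  remove("1_1"),
  createText("1_10", "hello"))

changes.sorted.map(_.id) // List(List(1, 1), List(1, 2), List(1, 10))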
Example 131
Source File: package.scala From aws-kinesis-scala with Apache License 2.0 | 5 votes |
package jp.co.bizreach import java.nio.ByteBuffer import com.amazonaws.services.kinesisfirehose.model.{ PutRecordRequest => AWSPutRecordRequest, Record => AWSRecord, PutRecordResult => AWSPutRecordResult, PutRecordBatchRequest => AWSPutRecordBatchRequest, PutRecordBatchResult => AWSPutRecordBatchResult} import scala.collection.JavaConverters._ import scala.language.implicitConversions package object kinesisfirehose { private[this] implicit class JListConverters[A](list: java.util.List[A]) { def immutableSeq: Seq[A] = list.asScala.toSeq } case class PutRecordRequest(deliveryStreamName: String, record: Array[Byte]) implicit def convertPutRecordRequest(request: PutRecordRequest): AWSPutRecordRequest = { val awsRequest = new AWSPutRecordRequest() awsRequest.setDeliveryStreamName(request.deliveryStreamName) awsRequest.setRecord(new AWSRecord().withData(ByteBuffer.wrap(request.record))) awsRequest } case class PutRecordResult(recordId: String) implicit def convertPutRecordResult(result: AWSPutRecordResult): PutRecordResult = { PutRecordResult( recordId = result.getRecordId ) } case class PutRecordBatchRequest(deliveryStreamName: String, records: Seq[Array[Byte]]) implicit def convertPutRecordBatchRequest(request: PutRecordBatchRequest): AWSPutRecordBatchRequest = { val awsRequest = new AWSPutRecordBatchRequest() awsRequest.setDeliveryStreamName(request.deliveryStreamName) awsRequest.setRecords(request.records.map { record => new AWSRecord().withData(ByteBuffer.wrap(record)) }.asJava) awsRequest } case class PutRecordBatchResult(failedPutCount: Int, records: Seq[PutRecordBatchResponseEntry]) case class PutRecordBatchResponseEntry(recordId: String, errorCode: String, errorMessage: String) implicit def convertPutRecordBatchResult(result: AWSPutRecordBatchResult): PutRecordBatchResult = { PutRecordBatchResult( failedPutCount = result.getFailedPutCount, records = result.getRequestResponses.immutableSeq.map { record => PutRecordBatchResponseEntry( recordId = record.getRecordId, errorCode = record.getErrorCode, errorMessage = record.getErrorMessage ) } ) } }
Example 132
Source File: DockerKit.scala From docker-it-scala with MIT License | 5 votes |
package com.whisk.docker import java.util.concurrent.Executors import org.slf4j.LoggerFactory import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, Future} import scala.language.implicitConversions trait DockerKit { implicit def dockerFactory: DockerFactory private lazy val log = LoggerFactory.getLogger(this.getClass) val PullImagesTimeout = 20.minutes val StartContainersTimeout = 20.seconds val StopContainersTimeout = 10.seconds def dockerContainers: List[DockerContainer] = Nil // we need ExecutionContext in order to run docker.init() / docker.stop() there implicit lazy val dockerExecutionContext: ExecutionContext = { // using Math.max to prevent unexpected zero length of docker containers ExecutionContext.fromExecutor( Executors.newFixedThreadPool(Math.max(1, dockerContainers.length * 2))) } implicit lazy val dockerExecutor = dockerFactory.createExecutor() lazy val containerManager = new DockerContainerManager(dockerContainers, dockerExecutor) def isContainerReady(container: DockerContainer): Future[Boolean] = containerManager.isReady(container) def getContainerState(container: DockerContainer): DockerContainerState = { containerManager.getContainerState(container) } implicit def containerToState(c: DockerContainer): DockerContainerState = { getContainerState(c) } def startAllOrFail(): Unit = { Await.result(containerManager.pullImages(), PullImagesTimeout) val allRunning: Boolean = try { val future: Future[Boolean] = containerManager.initReadyAll(StartContainersTimeout).map(_.map(_._2).forall(identity)) sys.addShutdownHook( Await.ready(containerManager.stopRmAll(), StopContainersTimeout) ) Await.result(future, StartContainersTimeout) } catch { case e: Exception => log.error("Exception during container initialization", e) false } if (!allRunning) { Await.ready(containerManager.stopRmAll(), StopContainersTimeout) throw new RuntimeException("Cannot run all required containers") } } def stopAllQuietly(): Unit = { try { Await.ready(containerManager.stopRmAll(), StopContainersTimeout) } catch { case e: Throwable => log.error(e.getMessage, e) } } }
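A minimal sketch of wiring a container into a test via DockerKit; the image name, ports, and readiness log line are assumptions, and dockerFactory is supplied by one of the docker-it-scala backend modules:

import com.whisk.docker.{DockerContainer, DockerKit, DockerReadyChecker}

trait DockerPostgresService extends DockerKit {
  val postgresContainer: DockerContainer =
    DockerContainer("postgres:11")
      .withPorts(5432 -> None)
      .withReadyChecker(DockerReadyChecker.LogLineContains("ready to accept connections"))

  override def dockerContainers: List[DockerContainer] =
    postgresContainer :: super.dockerContainers
}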
Example 133
Source File: ImplicitSyntax.scala From dsptools with BSD 3-Clause "New" or "Revised" License | 5 votes |
// See LICENSE for license details. package dsptools.numbers import chisel3.Data import scala.language.implicitConversions trait EqSyntax { implicit def eqOps[A <: Data:Eq](a: A): EqOps[A] = new EqOps(a) } trait PartialOrderSyntax extends EqSyntax { implicit def partialOrderOps[A <: Data:PartialOrder](a: A): PartialOrderOps[A] = new PartialOrderOps(a) } trait OrderSyntax extends PartialOrderSyntax { implicit def orderOps[A <: Data:Order](a: A): OrderOps[A] = new OrderOps(a) } trait SignedSyntax { implicit def signedOps[A <: Data:Signed](a: A): SignedOps[A] = new SignedOps(a) } trait IsRealSyntax extends OrderSyntax with SignedSyntax { implicit def isRealOps[A <: Data:IsReal](a: A): IsRealOps[A] = new IsRealOps(a) } trait IsIntegerSyntax extends IsRealSyntax { implicit def isIntegerOps[A <: Data:IsIntegral](a: A): IsIntegerOps[A] = new IsIntegerOps(a) } trait ConvertableToSyntax { implicit def convertableToOps[A <: Data:ConvertableTo](a: A): ConvertableToOps[A] = new ConvertableToOps(a) } trait ChiselConvertableFromSyntax { implicit def chiselConvertableFromOps[A <: Data:ChiselConvertableFrom](a: A): ChiselConvertableFromOps[A] = new ChiselConvertableFromOps(a) } trait BinaryRepresentationSyntax { implicit def binaryRepresentationOps[A <: Data:BinaryRepresentation](a: A): BinaryRepresentationOps[A] = new BinaryRepresentationOps(a) } trait ContextualRingSyntax { implicit def contextualRingOps[A <: Data:Ring](a: A): ContextualRingOps[A] = new ContextualRingOps(a) }
Example 134
Source File: KafkaMessagingProvider.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.connector.kafka import java.util.Properties import akka.actor.ActorSystem import org.apache.kafka.clients.admin.{AdminClient, AdminClientConfig, NewTopic} import org.apache.kafka.common.errors.{RetriableException, TopicExistsException} import pureconfig._ import pureconfig.generic.auto._ import org.apache.openwhisk.common.{CausedBy, Logging} import org.apache.openwhisk.core.{ConfigKeys, WhiskConfig} import org.apache.openwhisk.core.connector.{MessageConsumer, MessageProducer, MessagingProvider} import org.apache.openwhisk.core.entity.ByteSize import scala.collection.JavaConverters._ import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} case class KafkaConfig(replicationFactor: Short, consumerLagCheckInterval: FiniteDuration) def verifyConfig(config: Map[String, String], validKeys: Set[String])(implicit logging: Logging): Boolean = { val passedKeys = config.keySet val knownKeys = validKeys intersect passedKeys val unknownKeys = passedKeys -- knownKeys if (unknownKeys.nonEmpty) { logging.warn(this, s"potential misconfiguration, unknown settings: ${unknownKeys.mkString(",")}") false } else { true } } }
Example 135
Source File: ConcurrentMapBackedCache.scala From openwhisk with Apache License 2.0 | 5 votes |
import java.util.concurrent.ConcurrentMap

import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal

private class ConcurrentMapBackedCache[V](store: ConcurrentMap[Any, Future[V]]) {
  val cache = this

  def apply(key: Any) = new Keyed(key)

  class Keyed(key: Any) {
    def apply(magnet: => ValueMagnet[V])(implicit ec: ExecutionContext): Future[V] =
      cache.apply(
        key,
        () =>
          try magnet.future
          catch { case NonFatal(e) => Future.failed(e) })
  }

  def apply(key: Any, genValue: () => Future[V])(implicit ec: ExecutionContext): Future[V] = {
    store.computeIfAbsent(
      key,
      new java.util.function.Function[Any, Future[V]]() {
        override def apply(key: Any): Future[V] = {
          val future = genValue()
          future.onComplete { value =>
            // in case of exceptions we remove the cache entry (i.e. try again later)
            if (value.isFailure) store.remove(key, future)
          }
          future
        }
      })
  }

  def remove(key: Any) = Option(store.remove(key))

  def size = store.size
}

class ValueMagnet[V](val future: Future[V])

object ValueMagnet {
  import scala.language.implicitConversions

  implicit def fromAny[V](block: V): ValueMagnet[V] = fromFuture(Future.successful(block))
  implicit def fromFuture[V](future: Future[V]): ValueMagnet[V] = new ValueMagnet(future)
}
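The two ValueMagnet conversions form a small magnet pattern: the cached block may produce either a plain value or a Future, and both flow through the same apply. A sketch, from code in the same package since the class is package-private:

import java.util.concurrent.ConcurrentHashMap
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

val cache = new ConcurrentMapBackedCache[String](new ConcurrentHashMap())
val eager = cache("k1") { "computed now" }           // lifted by ValueMagnet.fromAny
val async = cache("k2") { Future("computed later") } // lifted by ValueMagnet.fromFuture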
Example 136
Source File: UserDefinedFunction.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.runtime.function import ml.combust.mleap.core.reflection.MleapReflection._ import ml.combust.mleap.core.types.{DataType, StructType, TypeSpec} import scala.language.implicitConversions import scala.reflect.runtime.universe.TypeTag object UserDefinedFunction { def apply(f: AnyRef, output: StructType, input: StructType): UserDefinedFunction = { UserDefinedFunction(f, output: TypeSpec, input.fields.map(_.dataType: TypeSpec)) } def apply(f: AnyRef, output: DataType, inputs: Seq[TypeSpec]): UserDefinedFunction = { UserDefinedFunction(f, output: TypeSpec, inputs) } def apply(f: AnyRef, output: TypeSpec, input0: DataType, inputs: DataType *): UserDefinedFunction = { UserDefinedFunction(f, output, (input0 +: inputs).map(d => d: TypeSpec)) } def apply(f: AnyRef, output: DataType, input0: DataType, inputs: DataType *): UserDefinedFunction = { UserDefinedFunction(f, output, (input0 +: inputs).map(d => d: TypeSpec)) } implicit def function0[RT: TypeTag](f: () => RT): UserDefinedFunction = { UserDefinedFunction(f, typeSpec[RT], Seq()) } implicit def function1[RT: TypeTag, T1: TypeTag](f: (T1) => RT): UserDefinedFunction = { UserDefinedFunction(f, typeSpec[RT], dataType[T1]) } implicit def function2[RT: TypeTag, T1: TypeTag, T2: TypeTag](f: (T1, T2) => RT): UserDefinedFunction = { UserDefinedFunction(f, typeSpec[RT], dataType[T1], dataType[T2]) } implicit def function3[RT: TypeTag, T1: TypeTag, T2: TypeTag, T3: TypeTag](f: (T1, T2, T3) => RT): UserDefinedFunction = { UserDefinedFunction(f, typeSpec[RT], dataType[T1], dataType[T2], dataType[T3]) } implicit def function4[RT: TypeTag, T1: TypeTag, T2: TypeTag, T3: TypeTag, T4: TypeTag](f: (T1, T2, T3, T4) => RT): UserDefinedFunction = { UserDefinedFunction(f, typeSpec[RT], dataType[T1], dataType[T2], dataType[T3], dataType[T4]) } implicit def function5[RT: TypeTag, T1: TypeTag, T2: TypeTag, T3: TypeTag, T4: TypeTag, T5: TypeTag](f: (T1, T2, T3, T4, T5) => RT): UserDefinedFunction = { UserDefinedFunction(f, typeSpec[RT], dataType[T1], dataType[T2], dataType[T3], dataType[T4], dataType[T5]) } } case class UserDefinedFunction(f: AnyRef, output: TypeSpec, inputs: Seq[TypeSpec]) { def outputTypes: Seq[DataType] = output.dataTypes def withInputs(inputs: Seq[TypeSpec]): UserDefinedFunction = copy(inputs = inputs) def withInputs(schema: StructType): UserDefinedFunction = withDataTypeInputs(schema.fields.map(_.dataType)) def withDataTypeInputs(inputs: Seq[DataType]): UserDefinedFunction = copy(inputs = inputs.map(dt => dt: TypeSpec)) def withOutput(dt: DataType): UserDefinedFunction = copy(output = dt) def withOutput(schema: StructType): UserDefinedFunction = copy(output = schema) }
Example 137
Source File: BundleTypeConverters.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.runtime.types import ml.bundle import ml.bundle.{DataShapeType, TensorDimension} import ml.combust.mleap.core.types._ import scala.language.implicitConversions trait BundleTypeConverters { implicit def bundleToMleapBasicType(b: bundle.BasicType): BasicType = { b match { case bundle.BasicType.BOOLEAN => BasicType.Boolean case bundle.BasicType.BYTE => BasicType.Byte case bundle.BasicType.SHORT => BasicType.Short case bundle.BasicType.INT => BasicType.Int case bundle.BasicType.LONG => BasicType.Long case bundle.BasicType.FLOAT => BasicType.Float case bundle.BasicType.DOUBLE => BasicType.Double case bundle.BasicType.STRING => BasicType.String case bundle.BasicType.BYTE_STRING => BasicType.ByteString case _ => throw new IllegalArgumentException(s"unsupported data type $b") } } implicit def mleapToBundleBasicType(b: BasicType): bundle.BasicType = b match { case BasicType.Boolean => bundle.BasicType.BOOLEAN case BasicType.Byte => bundle.BasicType.BYTE case BasicType.Short => bundle.BasicType.SHORT case BasicType.Int => bundle.BasicType.INT case BasicType.Long => bundle.BasicType.LONG case BasicType.Float => bundle.BasicType.FLOAT case BasicType.Double => bundle.BasicType.DOUBLE case BasicType.String => bundle.BasicType.STRING case BasicType.ByteString => bundle.BasicType.BYTE_STRING case _ => throw new IllegalArgumentException(s"unsupported type $b") } implicit def bundleToMleapShape(s: bundle.DataShape): DataShape = { s.base match { case DataShapeType.SCALAR => ScalarShape(isNullable = s.isNullable) case DataShapeType.LIST => ListShape(isNullable = s.isNullable) case DataShapeType.TENSOR => TensorShape(dimensions = s.tensorShape.map(_.dimensions.map(_.size)), isNullable = s.isNullable) case _ => throw new IllegalArgumentException(s"unsupported shape $s") } } implicit def mleapToBundleShape(s: DataShape): bundle.DataShape = { s match { case ScalarShape(isNullable) => bundle.DataShape(base = DataShapeType.SCALAR, isNullable = isNullable) case ListShape(isNullable) => bundle.DataShape(base = DataShapeType.LIST, isNullable = isNullable) case TensorShape(dimensions, isNullable) => bundle.DataShape(base = DataShapeType.TENSOR, isNullable = isNullable, tensorShape = dimensions.map(_.map(s => TensorDimension(s))).map(ml.bundle.TensorShape.apply)) } } implicit def mleapToBundleDataType(dt: DataType): bundle.DataType = bundle.DataType(dt.base, Some(dt.shape)) implicit def bundleToMleapDataType(dt: bundle.DataType): DataType = DataType(dt.base, dt.shape.get) implicit def mleapToBundleField(field: StructField): bundle.Field = bundle.Field(field.name, Some(field.dataType)) implicit def bundleToMleapField(field: bundle.Field): StructField = StructField(field.name, field.dataType.get) implicit def mleapToBundleSchema(schema: StructType): bundle.Schema = bundle.Schema(schema.fields.map(mleapToBundleField)) implicit def bundleToMleapSchema(schema: bundle.Schema): StructType = StructType(schema.fields.map(bundleToMleapField)).get implicit def mleapToBundleSocket(socket: Socket): bundle.Socket = bundle.Socket(socket.port, socket.name) implicit def bundleToMleapSocket(socket: bundle.Socket): Socket = Socket(socket.port, socket.name) implicit def mleapToBundleNodeShape(shape: NodeShape): bundle.NodeShape = bundle.NodeShape(shape.inputs.values.map(mleapToBundleSocket).toSeq, shape.outputs.values.map(mleapToBundleSocket).toSeq) implicit def bundleToMleapNodeShape(shape: bundle.NodeShape): NodeShape = NodeShape(shape.inputs.map(bundleToMleapSocket), shape.outputs.map(bundleToMleapSocket)) } 
object BundleTypeConverters extends BundleTypeConverters
Example 138
Source File: TypeSpec.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.core.types import scala.language.implicitConversions object TypeSpec { implicit def apply(dt: DataType): DataTypeSpec = DataTypeSpec(dt) implicit def apply(schema: StructType): SchemaSpec = SchemaSpec(schema) } sealed trait TypeSpec { def dataTypes: Seq[DataType] } case class DataTypeSpec(dt: DataType) extends TypeSpec { override val dataTypes: Seq[DataType] = Seq(dt) } object SchemaSpec { def apply(schema: StructType): SchemaSpec = SchemaSpec(schema.fields.map(_.dataType)) } case class SchemaSpec(dts: Seq[DataType]) extends TypeSpec { override def dataTypes: Seq[DataType] = dts }
Example 139
Source File: VectorConverters.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.core.util import breeze.linalg.{DenseVector => BDV, SparseVector => BSV, Vector => BV} import ml.combust.mleap.tensor.{DenseTensor, SparseTensor, Tensor} import org.apache.spark.ml.linalg.{DenseMatrix, DenseVector, Matrices, Matrix, SparseMatrix, SparseVector, Vector, Vectors} import scala.language.implicitConversions trait VectorConverters { implicit def sparkVectorToMleapTensor(vector: Vector): Tensor[Double] = vector match { case vector: DenseVector => DenseTensor(vector.toArray, Seq(vector.size)) case vector: SparseVector => SparseTensor(indices = vector.indices.map(i => Seq(i)), values = vector.values, dimensions = Seq(vector.size)) } implicit def mleapTensorToSparkVector(tensor: Tensor[Double]): Vector = tensor match { case tensor: DenseTensor[_] => Vectors.dense(tensor.rawValues.asInstanceOf[Array[Double]]) case tensor: SparseTensor[_] => Vectors.sparse(tensor.dimensions.product, tensor.indices.map(_.head).toArray, tensor.values.asInstanceOf[Array[Double]]) } implicit def sparkMatrixToMleapTensor(matrix: Matrix): Tensor[Double] = matrix match { case matrix: DenseMatrix => DenseTensor(matrix.toArray, Seq(matrix.numRows, matrix.numCols)) case matrix: SparseMatrix => val indices = matrix.rowIndices.zip(matrix.colPtrs).map { case (r, c) => Seq(r, c) }.toSeq SparseTensor(indices = indices, values = matrix.values, dimensions = Seq(matrix.numRows, matrix.numCols)) } implicit def mleapTensorToSparkMatrix(tensor: Tensor[Double]): Matrix = tensor match { case tensor: DenseTensor[_] => Matrices.dense(tensor.dimensions.head, tensor.dimensions(1), tensor.rawValues.asInstanceOf[Array[Double]]) case tensor: SparseTensor[_] => val (rows, cols) = tensor.indices.map(v => (v.head, v(1))).unzip Matrices.sparse(tensor.dimensions.head, tensor.dimensions(1), cols.toArray, rows.toArray, tensor.values.asInstanceOf[Array[Double]]) } implicit def breezeVectorToMLeapTensor(vector: BV[Double]): Tensor[Double] = vector match { case vector : BDV[Double] => DenseTensor(vector.toArray, Seq(vector.size)) case vector : BSV[Double] => SparseTensor(vector.index.map(i => Seq(i)), vector.data, Seq(vector.values.size)) } implicit def mleapTensorToBreezeVector(tensor: Tensor[Double]): BV[Double] = tensor match { case tensor: DenseTensor[_] => new BDV(tensor.rawValues.asInstanceOf[Array[Double]]) case tensor: SparseTensor[_] => new BSV(tensor.indices.map(_.head).toArray, tensor.values.asInstanceOf[Array[Double]], tensor.dimensions.product) } } object VectorConverters extends VectorConverters
Example 140
Source File: BundleTypeConverters.scala From mleap with Apache License 2.0 | 5 votes |
package org.apache.spark.ml.bundle import ml.bundle import ml.bundle.{BasicType, DataShapeType} import org.apache.spark.ml.linalg.VectorUDT import org.apache.spark.sql.DataFrame import org.apache.spark.sql.types._ import scala.language.implicitConversions trait BundleTypeConverters { implicit def sparkToBundleDataShape(field: StructField) (implicit dataset: DataFrame): bundle.DataShape = { field.dataType match { case BooleanType | ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType | StringType | ArrayType(ByteType, false) => bundle.DataShape(DataShapeType.SCALAR) case ArrayType(_, _) => bundle.DataShape(DataShapeType.LIST) case _: VectorUDT => // collect size information from dataset if necessary bundle.DataShape(bundle.DataShapeType.TENSOR) case _ => throw new IllegalArgumentException(s"invalid shape for field $field") } } implicit def sparkToBundleBasicType(dataType: DataType) (implicit dataset: DataFrame): bundle.BasicType = { dataType match { case BooleanType => BasicType.BOOLEAN case ByteType => BasicType.BYTE case ShortType => BasicType.SHORT case IntegerType => BasicType.INT case LongType => BasicType.LONG case FloatType => BasicType.FLOAT case DoubleType => BasicType.DOUBLE case StringType => BasicType.STRING case ArrayType(ByteType, _) => BasicType.BYTE_STRING case ArrayType(dt, _) => sparkToBundleBasicType(dt) case _: VectorUDT => BasicType.DOUBLE case _ => throw new IllegalArgumentException(s"invalid spark basic type $dataType") } } implicit def sparkToBundleDataType(field: StructField) (implicit dataset: DataFrame): bundle.DataType = { bundle.DataType(field.dataType, Some(field)) } } object BundleTypeConverters extends BundleTypeConverters
Example 141
Source File: TypeConverters.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.springboot import java.net.URI import java.util.concurrent.TimeUnit import com.google.protobuf.ProtocolStringList import ml.combust.mleap.executor import ml.combust.mleap.pb._ import scala.concurrent.{ExecutionContext, Future, Promise} import scala.concurrent.duration.FiniteDuration import scala.concurrent.duration._ import scala.util.Try import ml.combust.mleap.runtime.types.BundleTypeConverters._ object TypeConverters { import scala.language.implicitConversions implicit def getTimeout(ms: Int): FiniteDuration = FiniteDuration(ms, TimeUnit.MILLISECONDS) implicit def pbToExecutorLoadModelRequest(request: LoadModelRequest): executor.LoadModelRequest = executor.LoadModelRequest(modelName = request.modelName, uri = URI.create(request.uri), config = request.config.map(pbToExecutorModelConfig), force = request.force) implicit def javaPbToExecutorLoadModelRequest(request: Mleap.LoadModelRequest): executor.LoadModelRequest = { val modelConfig = Option(request.getConfig).map(javaPbToExecutorModelConfig) executor.LoadModelRequest(modelName = request.getModelName, uri = URI.create(request.getUri), config = modelConfig, force = request.getForce) } implicit def pbToExecutorModelConfig(config: ModelConfig): executor.ModelConfig = { executor.ModelConfig(memoryTimeout = config.memoryTimeout.map(_.millis), diskTimeout = config.diskTimeout.map(_.millis)) } implicit def javaPbToExecutorModelConfig(config: Mleap.ModelConfig): executor.ModelConfig = { executor.ModelConfig(memoryTimeout = Option(config.getMemoryTimeout).map(_.getValue.millis), diskTimeout = Option(config.getDiskTimeout).map(_.getValue.millis)) } implicit def executorToPbModelConfig(config: executor.ModelConfig): ModelConfig = ModelConfig(memoryTimeout = config.memoryTimeout.map(_.toMillis), diskTimeout = config.diskTimeout.map(_.toMillis)) implicit def executorToPbModel(model: executor.Model): Model = Model(name = model.name, uri = model.uri.toString, config = Some(model.config)) implicit def pbToExecutorModel(model: Model): executor.Model = executor.Model(name = model.name, uri = URI.create(model.uri), config = model.config.get) implicit def executorToPbBundleMeta(meta: executor.BundleMeta): BundleMeta = BundleMeta(bundle = Some(meta.info.asBundle), inputSchema = Some(meta.inputSchema), outputSchema = Some(meta.outputSchema)) implicit def pbToExecutorTransformOptions(options: TransformOptions): executor.TransformOptions = executor.TransformOptions(select = options.select, selectMode = options.selectMode) implicit def javaPbToExecutorTransformOptions(options: Mleap.TransformOptions): executor.TransformOptions = executor.TransformOptions(select = options.getSelectList, selectMode = options.getSelectMode) implicit def javaPbToExecutorSelectMode(sm: Mleap.SelectMode): executor.SelectMode = if (sm == Mleap.SelectMode.SELECT_MODE_RELAXED) executor.SelectMode.Relaxed else if (sm == Mleap.SelectMode.SELECT_MODE_STRICT) executor.SelectMode.Strict else executor.SelectMode.Strict implicit def javaPbToExecutorSelect(select: ProtocolStringList): Option[Seq[String]] = if (select.isEmpty) None else Some(select.toArray().map(_.asInstanceOf[String]).toSeq) implicit def pbToExecutorSelectMode(sm: SelectMode): executor.SelectMode = if (sm.isSelectModeRelaxed) executor.SelectMode.Relaxed else if (sm.isSelectModeStrict) executor.SelectMode.Strict else executor.SelectMode.Strict implicit def pbToExecutorSelect(select: Seq[String]): Option[Seq[String]] = if (select.isEmpty) None else Some(select) implicit class RichFuture[T](f: Future[T]) { 
def mapAll[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[U] = { val p = Promise[U]() f.onComplete(r => p.complete(Try(pf(r))))(executor) p.future } } }
Example 142
Source File: ScallionParser.scala From scallion with Apache License 2.0 | 5 votes |
package json

import scala.language.implicitConversions

import scallion.input._
import scallion.lexical._
import scallion.syntactic._

class ScallionParser extends Syntaxes with ll1.Parsing with gzpwd.Parsing with simplell1.Parsing {
  type Token = json.Token
  type Kind = TokenClass

  import Implicits._

  override def getKind(token: Token): TokenClass = token match {
    case SeparatorToken(value, _) => SeparatorClass(value)
    case BooleanToken(_, _)       => BooleanClass
    case NumberToken(_, _)        => NumberClass
    case StringToken(_, _)        => StringClass
    case NullToken(_)             => NullClass
    case _                        => NoClass
  }

  val booleanValue: Syntax[Value] = accept(BooleanClass) {
    case BooleanToken(value, range) => BooleanValue(value, range)
  }

  val numberValue: Syntax[Value] = accept(NumberClass) {
    case NumberToken(value, range) => NumberValue(value, range)
  }

  val stringValue: Syntax[StringValue] = accept(StringClass) {
    case StringToken(value, range) => StringValue(value, range)
  }

  val nullValue: Syntax[Value] = accept(NullClass) {
    case NullToken(range) => NullValue(range)
  }

  implicit def separator(char: Char): Syntax[Token] = elem(SeparatorClass(char))

  lazy val arrayValue: Syntax[Value] = ('[' ~ repsep(value, ',') ~ ']').map {
    case start ~ vs ~ end => ArrayValue(vs, (start.range._1, end.range._2))
  }

  lazy val binding: Syntax[(StringValue, Value)] = (stringValue ~ ':' ~ value).map {
    case key ~ _ ~ value => (key, value)
  }

  lazy val objectValue: Syntax[Value] = ('{' ~ repsep(binding, ',') ~ '}').map {
    case start ~ bs ~ end => ObjectValue(bs, (start.range._1, end.range._2))
  }

  lazy val value: Syntax[Value] = recursive {
    oneOf(arrayValue, objectValue, booleanValue, numberValue, stringValue.up[Value], nullValue)
  }

  lazy val parser = LL1(value)
  lazy val genParser = GZPWD(value)
  lazy val simpleParser = SimpleLL1(value)

  def apply(it: Iterator[Token]): Option[Value] = parser(it) match {
    case LL1.Parsed(value, _)      => Some(value)
    case LL1.UnexpectedToken(_, _) => None
    case LL1.UnexpectedEnd(_)      => None
  }

  def genApply(it: Iterator[Token]): Option[Value] = genParser(it) match {
    case GZPWD.Parsed(value, _)      => Some(value)
    case GZPWD.UnexpectedToken(_, _) => None
    case GZPWD.UnexpectedEnd(_)      => None
  }

  def simpleApply(it: Iterator[Token]): Option[Value] = simpleParser(it) match {
    case SimpleLL1.Parsed(value, _)      => Some(value)
    case SimpleLL1.UnexpectedToken(_, _) => None
    case SimpleLL1.UnexpectedEnd(_)      => None
  }
}
Example 143
Source File: BulkTableWriter.scala From spark-cassandra-stress with Apache License 2.0 | 5 votes |
package com.datastax.bdp.spark.writer

import java.nio.file.Path

import org.apache.spark.rdd.RDD
import com.datastax.spark.connector._
import com.datastax.spark.connector.writer._

import scala.language.implicitConversions

object BulkTableWriter {
  implicit def toBulkTableWriter[T](rdd: RDD[T]): BulkTableWriter[T] =
    new BulkTableWriter(rdd)
}

class BulkTableWriter[T](rdd: RDD[T]) {
  // Placeholder in spark-cassandra-stress: the bulk writer is intentionally
  // not implemented here.
  def bulkSaveToCassandra(keyspaceName: String,
                          tableName: String,
                          columns: ColumnSelector = AllColumns,
                          writeConf: BulkWriteConf = BulkWriteConf()): Unit = {
    throw new UnsupportedOperationException
  }
}

case class BulkWriteConf(outputDirectory: Option[Path] = None,
                         deleteSource: Boolean = true,
                         bufferSizeInMB: Int = 64)
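Usage sketch: with the implicit toBulkTableWriter in scope, any RDD gains bulkSaveToCassandra (the stub above simply throws; `sc` is an assumed SparkContext):

import com.datastax.bdp.spark.writer.BulkTableWriter._

val rdd = sc.parallelize(Seq(1 -> "a", 2 -> "b"))
rdd.bulkSaveToCassandra("my_keyspace", "my_table")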
Example 144
Source File: GeneratableDefinition.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.gen

import de.leanovate.swaggercheck.schema.model._
import de.leanovate.swaggercheck.shrinkable.CheckJsValue
import org.scalacheck.Gen

import scala.language.implicitConversions

trait GeneratableDefinition extends Definition {
  def generate(schema: GeneratableSchema): Gen[CheckJsValue]
}

object GeneratableDefinition {
  implicit def toGeneratable(definition: Definition): GeneratableDefinition = definition match {
    case definition: AllOfDefinition     => GeneratableAllOf(definition)
    case definition: ArrayDefinition     => GeneratableArray(definition)
    case BooleanDefinition               => GeneratableBoolean
    case EmptyDefinition                 => GeneratableEmpty
    case definition: IntegerDefinition   => GeneratableInteger(definition)
    case definition: NumberDefinition    => GeneratableNumber(definition)
    case definition: ObjectDefinition    => GeneratableObject(definition)
    case definition: OneOfDefinition     => GeneratableOneOf(definition)
    case definition: StringDefinition    => GeneratableString(definition)
    case definition: ReferenceDefinition => GeneratableReference(definition)
  }
}
Example 145
Source File: GeneratableDefaultSchema.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.gen

import de.leanovate.swaggercheck.schema.gen.GeneratableDefinition._
import de.leanovate.swaggercheck.schema.gen.formats.{GeneratableFormat, GeneratableIntegerFormats, GeneratableNumberFormats, GeneratableStringFormats}
import de.leanovate.swaggercheck.schema.model.{DefaultSchema, Definition}
import de.leanovate.swaggercheck.shrinkable.CheckJsValue
import org.scalacheck.Gen

import scala.language.implicitConversions

case class GeneratableDefaultSchema(
    schema: DefaultSchema,
    randomAdditionalFields: Boolean = false,
    maxItems: Int = 10,
    stringFormats: Map[String, GeneratableFormat[String]] = GeneratableStringFormats.defaultFormats,
    integerFormats: Map[String, GeneratableFormat[BigInt]] = GeneratableIntegerFormats.defaultFormats,
    numberFormats: Map[String, GeneratableFormat[BigDecimal]] = GeneratableNumberFormats.defaultFormats
) extends GeneratableSchema {

  def generate: Gen[CheckJsValue] = schema.root.generate(this)

  def withRandomAdditionalFields() = copy(randomAdditionalFields = true)

  // Note: this halves the current maxItems (shrinking nested structures on
  // each level) rather than using the newMaxItems argument.
  override def withMaxItems(newMaxItems: Int): GeneratableSchema = copy(maxItems = maxItems / 2)

  override def findGeneratableStringFormat(format: String): Option[GeneratableFormat[String]] =
    stringFormats.get(format)

  override def findGeneratableNumberFormat(format: String): Option[GeneratableFormat[BigDecimal]] =
    numberFormats.get(format)

  override def findGeneratableIntegerFormat(format: String): Option[GeneratableFormat[BigInt]] =
    integerFormats.get(format)

  override def findByRef(ref: String): Option[Definition] = schema.findByRef(ref)
}

object GeneratableDefaultSchema {
  implicit def toGeneratable(schema: DefaultSchema): GeneratableDefaultSchema =
    GeneratableDefaultSchema(schema)
}
Example 146
Source File: SurfaceSpec.scala From airframe with Apache License 2.0 | 5 votes |
package wvlet.airframe.surface

import wvlet.airspec.AirSpec
import wvlet.log.LogSupport

import scala.language.implicitConversions

trait SurfaceSpec extends AirSpec with LogSupport {
  protected def check(body: => Surface, expectedName: String): Surface = {
    val surface = body
    debug(s"[${surface.getClass.getSimpleName}] $surface, ${surface.fullName}")
    surface.toString shouldBe expectedName
    surface
  }

  protected def checkPrimitive(body: => Surface, expectedName: String): Surface = {
    val s = check(body, expectedName)
    assert(!s.isAlias)
    assert(!s.isOption)
    assert(s.isPrimitive)
    assert(s.objectFactory.isEmpty)
    s
  }
}
Example 147
Source File: HeaderAccessControl.scala From naptime with Apache License 2.0 | 5 votes |
package org.coursera.naptime.access

import org.coursera.naptime.NaptimeActionException
import org.coursera.naptime.access.authenticator.Authenticator
import org.coursera.naptime.access.authenticator.Decorator
import org.coursera.naptime.access.authenticator.HeaderAuthenticationParser
import org.coursera.naptime.access.authorizer.AuthorizeResult
import org.coursera.naptime.access.authorizer.Authorizer
import org.coursera.naptime.access.combiner.And
import org.coursera.naptime.access.combiner.AnyOf
import org.coursera.naptime.access.combiner.EitherOf
import org.coursera.naptime.access.combiner.SuccessfulOf
import play.api.mvc.RequestHeader

import scala.concurrent.ExecutionContext
import scala.concurrent.Future

// The trait body was truncated in this listing; only the check method
// survives. A minimal [A]-typed trait wrapper is restored here so the
// fragment parses.
trait HeaderAccessControl[A] {
  private[naptime] def check(authInfo: A): Either[NaptimeActionException, A]
}

object HeaderAccessControl extends AnyOf with And with EitherOf with SuccessfulOf {
  def allowAll: HeaderAccessControl[Unit] = {
    val parser = HeaderAuthenticationParser.constant(())
    val authorizer = Authorizer[Unit](_ => AuthorizeResult.Authorized)
    StructuredAccessControl(Authenticator(parser, Decorator.identity[Unit]), authorizer)
  }

  import scala.language.implicitConversions

  implicit def accessControlGenerator[BodyType, T](
      accessControl: HeaderAccessControl[T]): (BodyType => HeaderAccessControl[T]) = {
    (b: BodyType) => accessControl
  }
}
Example 148
Source File: package.scala From milan with Apache License 2.0 | 5 votes |
package com.amazon.milan.compiler.flink

import com.amazon.milan.compiler.scala.ClassName
import com.amazon.milan.compiler.flink.types._
import com.amazon.milan.compiler.flink.typeutil._
import com.amazon.milan.program.Duration
import com.amazon.milan.typeutil.{FieldDescriptor, ObjectTypeDescriptor, TypeDescriptor}
import org.apache.flink.streaming.api.windowing.time.Time

import scala.language.implicitConversions

package object generator {

  implicit class FlinkGeneratorTypeDescriptorExtensions[_](t: TypeDescriptor[_]) {
    def getFlinkTypeFullName: String = {
      if (t.isInstanceOf[TupleRecordTypeDescriptor[_]]) {
        ArrayRecord.typeName
      } else {
        t.fullName
      }
    }

    def toFlinkTerm: ClassName = ClassName(this.getFlinkTypeFullName)

    def wrappedWithKey(keyType: TypeDescriptor[_]): TypeDescriptor[RecordWrapper[_, _]] = {
      val fields = List(
        FieldDescriptor[Any]("value", this.t.asInstanceOf[TypeDescriptor[Any]]),
        FieldDescriptor[Any]("key", keyType.asInstanceOf[TypeDescriptor[Any]]),
        FieldDescriptor[Long]("sequenceNumber", com.amazon.milan.typeutil.types.Long)
      )

      new ObjectTypeDescriptor[RecordWrapper[_, _]](
        "com.amazon.milan.compiler.flink.types.RecordWrapper",
        List(this.t, keyType),
        fields)
    }
  }

  implicit class FlinkGeneratorDurationExtensions(duration: Duration) {
    def toFlinkTime: Time = Time.milliseconds(duration.milliseconds)
  }
}
Example 149
Source File: package.scala From milan with Apache License 2.0 | 5 votes |
package com.amazon.milan.compiler.flink

import java.time.Duration
import java.util.concurrent.TimeoutException

import com.amazon.milan.testing.Concurrent
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment

import scala.concurrent.{Await, ExecutionContext, Future, blocking}
import scala.language.implicitConversions

package object testing {
  implicit def extendStreamExecutionEnvironment(env: StreamExecutionEnvironment): StreamExecutionEnvironmentExtensions =
    new StreamExecutionEnvironmentExtensions(env)

  implicit def extendFuture[T](future: Future[T]): FutureExtensions[T] =
    new FutureExtensions[T](future)

  implicit class DurationExtensions(d: Duration) {
    def toConcurrent: scala.concurrent.duration.Duration =
      scala.concurrent.duration.Duration(this.d.toMillis, scala.concurrent.duration.MILLISECONDS)
  }
}

class StreamExecutionEnvironmentExtensions(env: StreamExecutionEnvironment) {
  def executeThenWaitFor(predicate: () => Boolean, secondsToWait: Int): Unit = {
    if (!Concurrent.executeAndWait(
      () => env.execute(),
      predicate,
      Duration.ofSeconds(secondsToWait))) {
      throw new TimeoutException("Timed out waiting for stop condition.")
    }
  }

  def executeAsync(maxSeconds: Int): Future[Boolean] = {
    Concurrent.executeAsync(() => env.execute(), () => true, Duration.ofSeconds(maxSeconds))
  }

  def executeUntilAsync(predicate: () => Boolean, secondsToWait: Int): Future[Unit] = {
    val result = Concurrent.executeAsync(
      () => env.execute(),
      predicate,
      Duration.ofSeconds(secondsToWait))

    result.transform(
      success =>
        if (!success) {
          throw new TimeoutException("Timed out waiting for stop condition.")
        },
      ex => throw ex)(ExecutionContext.global)
  }

  def executeAtMost(maxSeconds: Int): Unit = {
    if (!Concurrent.executeUntil(
      () => env.execute(),
      () => true,
      Duration.ofSeconds(maxSeconds))) {
      throw new TimeoutException("Timed out waiting for stop condition.")
    }
  }
}

class FutureExtensions[T](future: Future[T]) {
  def thenWaitFor(duration: Duration)(implicit context: ExecutionContext): Future[T] = {
    Future {
      blocking {
        val result = Await.result(this.future, scala.concurrent.duration.Duration.Inf)
        Thread.sleep(duration.toMillis)
        result
      }
    }
  }
}
Example 150
Source File: package.scala From milan with Apache License 2.0 | 5 votes |
package com.amazon.milan.compiler.flink

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import com.amazon.milan.compiler.flink.runtime.{UnwrapRecordsMapFunction, WrapRecordsMapFunction}
import com.amazon.milan.compiler.flink.testing.IntKeyValueRecord
import com.amazon.milan.compiler.flink.types.{RecordWrapper, RecordWrapperTypeInformation}
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.common.typeutils.TypeSerializer
import org.apache.flink.api.java.typeutils.ResultTypeQueryable
import org.apache.flink.core.memory.{DataInputView, DataInputViewStreamWrapper, DataOutputView, DataOutputViewStreamWrapper}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment

import scala.language.implicitConversions
import scala.util.Random

package object testutil {
  def getTestExecutionEnvironment: StreamExecutionEnvironment = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    env.setBufferTimeout(0)
    env
  }

  def copyWithSerializer[T](value: T, serializer: TypeSerializer[T]): T = {
    val outputStream = new ByteArrayOutputStream()
    val outputView = new DataOutputViewStreamWrapper(outputStream)
    serializer.serialize(value, outputView)

    val bytes = outputStream.toByteArray
    val inputStream = new ByteArrayInputStream(bytes)
    val inputView = new DataInputViewStreamWrapper(inputStream)
    serializer.deserialize(inputView)
  }

  def copyData[T](writeValue: DataOutputView => Unit, readValue: DataInputView => T): T = {
    val outputStream = new ByteArrayOutputStream()
    val outputView = new DataOutputViewStreamWrapper(outputStream)
    writeValue(outputView)

    val bytes = outputStream.toByteArray
    val inputStream = new ByteArrayInputStream(bytes)
    val inputView = new DataInputViewStreamWrapper(inputStream)
    readValue(inputView)
  }

  def generateIntKeyValueRecords(recordCount: Int, keyCount: Int, maxValue: Int): List[IntKeyValueRecord] = {
    val rand = new Random(0)
    List.tabulate(recordCount)(_ => IntKeyValueRecord(rand.nextInt(keyCount), rand.nextInt(maxValue + 1)))
  }

  implicit class WrappedDataStreamExtensions[T >: Null, TKey >: Null <: Product](dataStream: DataStream[RecordWrapper[T, TKey]]) {
    def unwrap(recordTypeInformation: TypeInformation[T]): DataStream[T] = {
      val mapper = new UnwrapRecordsMapFunction[T, TKey](recordTypeInformation)
      this.dataStream.map(mapper)
    }

    def unwrap(): DataStream[T] = {
      val recordType = this.dataStream.getType.asInstanceOf[RecordWrapperTypeInformation[T, TKey]].valueTypeInformation
      this.unwrap(recordType)
    }
  }

  implicit class DataStreamExtensions[T >: Null](dataStream: DataStream[T]) {
    def wrap(recordTypeInformation: TypeInformation[T]): DataStream[RecordWrapper[T, Product]] = {
      val mapper = new WrapRecordsMapFunction[T](recordTypeInformation)
      this.dataStream.map(mapper)
    }

    def wrap(): DataStream[RecordWrapper[T, Product]] = {
      val recordType = this.dataStream.asInstanceOf[ResultTypeQueryable[T]].getProducedType
      this.wrap(recordType)
    }
  }
}
Example 151
Source File: ScalaTypes.scala From xmlrpc with MIT License | 5 votes |
package xmlrpc.protocol

import xmlrpc.protocol.Deserializer.Deserialized

import scala.language.implicitConversions
import scala.xml.NodeSeq
import scalaz.Scalaz._

trait ScalaTypes extends Protocol {
  implicit def optionXmlrpc[T: Datatype]: Datatype[Option[T]] = new Datatype[Option[T]] {
    override def serialize(value: Option[T]): NodeSeq = value match {
      case Some(a) => toXmlrpc[T](a)
      case None    => NodeSeq.Empty
    }

    override def deserialize(from: NodeSeq): Deserialized[Option[T]] =
      (from \\ "value").headOption match {
        case Some(a) => fromXmlrpc[T](a) map (Some(_))
        case None    => None.success
      }
  }
}
Example 152
Source File: XmlrpcResponse.scala From xmlrpc with MIT License | 5 votes |
package xmlrpc

import xmlrpc.protocol.Datatype
import xmlrpc.protocol.Deserializer.{AnyErrors, Deserialized}
import xmlrpc.protocol.XmlrpcProtocol.readXmlResponse

import scala.concurrent.{ExecutionContext, Future}
import scala.language.implicitConversions
import scala.xml.NodeSeq
import scalaz.Scalaz._

case class XmlrpcResponse[R](underlying: Future[Deserialized[R]])(implicit ec: ExecutionContext) {
  import XmlrpcResponse.ToFutureDeserialized

  def map[S](f: R => S): XmlrpcResponse[S] = flatMap(r => XmlrpcResponse.apply(f(r)))

  def flatMap[S](f: R => XmlrpcResponse[S]): XmlrpcResponse[S] = XmlrpcResponse[S] {
    handleErrors flatMap (_ fold (e => e.asFutureFailure, f(_).handleErrors))
  }

  def fold[S](failure: AnyErrors => XmlrpcResponse[S], success: R => S): XmlrpcResponse[S] = XmlrpcResponse[S] {
    handleErrors flatMap (_ fold (failure(_).handleErrors, r => success(r).asFutureSuccess))
  }

  private lazy val handleErrors: Future[Deserialized[R]] = underlying recover {
    case error: Throwable =>
      ConnectionError("Error when processing the future response", Some(error)).failures
  }
}

object XmlrpcResponse {
  def apply[R](value: R)(implicit ec: ExecutionContext): XmlrpcResponse[R] =
    XmlrpcResponse(value.asFutureSuccess)

  def apply[R](value: Deserialized[R])(implicit ec: ExecutionContext): XmlrpcResponse[R] =
    XmlrpcResponse[R] { Future.successful(value) }

  implicit class AkkaHttpToXmlrpcResponse(underlying: Future[NodeSeq])(implicit ec: ExecutionContext) {
    def asXmlrpcResponse[R: Datatype]: XmlrpcResponse[R] =
      XmlrpcResponse[R](underlying map readXmlResponse[R])
  }

  implicit class WithRetry[R](f: () => XmlrpcResponse[R])(implicit ec: ExecutionContext) {
    def retry[S](runFailure: AnyErrors => S, runSuccess: R => S, times: Int): XmlrpcResponse[S] = {
      def failureLogic(errors: AnyErrors, remaining: Int): XmlrpcResponse[S] =
        if (remaining == 0) XmlrpcResponse(runFailure(errors))
        else retry(runFailure, runSuccess, remaining - 1)

      def run(remaining: Int): XmlrpcResponse[S] =
        f() fold (failureLogic(_, remaining), runSuccess)

      if (times <= 0) throw new IllegalArgumentException("Retry must be executed at least one time.")
      else run(times)
    }

    def retry(times: Int): XmlrpcResponse[R] = {
      def failureLogic(errors: AnyErrors, remaining: Int): XmlrpcResponse[R] =
        if (remaining == 0) XmlrpcResponse(errors.asFutureFailure)
        else retry(remaining - 1)

      def run(remaining: Int): XmlrpcResponse[R] =
        f() fold (failureLogic(_, remaining), r => r)

      if (times <= 0) throw new IllegalArgumentException("Retry must be executed at least one time.")
      else run(times)
    }
  }

  implicit class ToFutureDeserialized[T](t: T) {
    def asFutureSuccess = Future.successful(t.success)
    def asFutureFailure = Future.successful(t.failure)
  }
}
Example 153
Source File: OffsetRange.scala From azure-event-hubs-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.eventhubs.rdd

import org.apache.spark.eventhubs.NameAndPartition
import org.apache.spark.eventhubs._
import org.apache.spark.streaming.eventhubs.EventHubsDirectDStream

import scala.language.implicitConversions

object OffsetRange {
  type OffsetRangeTuple = (NameAndPartition, SequenceNumber, SequenceNumber, Option[String])

  def apply(name: String,
            partitionId: PartitionId,
            fromSeq: SequenceNumber,
            untilSeq: SequenceNumber,
            preferredLoc: Option[String]): OffsetRange = {
    OffsetRange(NameAndPartition(name, partitionId), fromSeq, untilSeq, preferredLoc)
  }

  def apply(nAndP: NameAndPartition,
            fromSeq: SequenceNumber,
            untilSeq: SequenceNumber,
            preferredLoc: Option[String]): OffsetRange = {
    new OffsetRange(nAndP, fromSeq, untilSeq, preferredLoc)
  }

  def apply(tuple: OffsetRangeTuple): OffsetRange = {
    tupleToOffsetRange(tuple)
  }

  implicit def tupleToOffsetRange(tuple: OffsetRangeTuple): OffsetRange =
    OffsetRange(tuple._1, tuple._2, tuple._3, tuple._4)

  implicit def tupleListToOffsetRangeList(list: List[OffsetRangeTuple]): List[OffsetRange] =
    for { tuple <- list } yield tupleToOffsetRange(tuple)
}
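Usage sketch for the tuple conversions above; the NameAndPartition value is illustrative:

import org.apache.spark.eventhubs.NameAndPartition
import org.apache.spark.eventhubs.rdd.OffsetRange
import org.apache.spark.eventhubs.rdd.OffsetRange.OffsetRangeTuple

val tuple: OffsetRangeTuple = (NameAndPartition("hub", 0), 0L, 100L, None)
val range: OffsetRange = tuple // lifted via tupleToOffsetRange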
Example 154
Source File: package.scala From schedoscope with Apache License 2.0 | 5 votes |
package org.schedoscope

import org.schedoscope.dsl.transformations.{HiveTransformation, SeqTransformation}
import org.schedoscope.dsl.{FieldLike, View}

import scala.language.implicitConversions

// The declaration of the enclosing object and its public members were
// truncated in this listing; a placeholder name is used below so the
// surviving private helper parses.
object TruncatedSchedoscopeHelpers {

  private def getHiveTransformation(st: SeqTransformation[_, _]): Option[HiveTransformation] =
    st.firstThisTransformation -> st.thenThatTransformation match {
      case (ht: HiveTransformation, _)      => Some(ht)
      case (_, ht: HiveTransformation)      => Some(ht)
      case (st: SeqTransformation[_, _], _) => getHiveTransformation(st)
      case (_, st: SeqTransformation[_, _]) => getHiveTransformation(st)
      case _                                => None
    }
}

class NoHiveTransformationException extends Exception
Example 155
Source File: KernelMonad.scala From Scala-for-Machine-Learning-Second-Edition with MIT License | 5 votes |
package org.scalaml.supervised.svm.kernel

import org.scalaml.Predef.DblVec
import org.scalaml.core.functional._Monad

import scala.language.implicitConversions

private[scalaml] object KernelMonad {
  type F1 = Double => Double
  type F2 = (Double, Double) => Double

  case class KF[G](g: G, h: F2) {
    def metric(v: DblVec, w: DblVec)(implicit gf: G => F1): Double =
      g(v.zip(w).map { case (_v, _w) => h(_v, _w) }.sum)
  }

  implicit def hg2KF[G](hg: (G, F2)): KF[G] = KF(hg._1, hg._2)

  val identity = (x: Double, y: Double) => x * y

  val kfMonad = new _Monad[KF] {
    override def unit[G](g: G): KF[G] = KF[G](g, identity)
    override def map[G, H](kf: KF[G])(f: G => H): KF[H] = KF[H](f(kf.g), kf.h)
    override def flatMap[G, H](kf: KF[G])(f: G => KF[H]): KF[H] = KF[H](f(kf.g).g, kf.h)
  }

  implicit class kF2Monad[G](kf: KF[G]) {
    def map[H](f: G => H): KF[H] = kfMonad.map(kf)(f)
    def flatMap[H](f: G => KF[H]): KF[H] = kfMonad.flatMap(kf)(f)
  }

  class RBF(s2: Double)
    extends KF[F1]((x: Double) => Math.exp(-0.5 * x * x / s2), (x: Double, y: Double) => x - y)

  class Polynomial(d: Int)
    extends KF[F1]((x: Double) => Math.pow(1.0 + x, d), (x: Double, y: Double) => x * y)
}

// "extends App" restored here: the listing showed a bare "extends {", which
// does not parse, and the object body consists of executable statements.
private[scalaml] object KernelMonadApp extends App {
  import KernelMonad._

  val v = Vector[Double](0.5, 0.2, 0.3)
  val w = Vector[Double](0.1, 0.7, 0.2)

  val composed = for {
    kf1 <- new RBF(0.6)
    kf2 <- new Polynomial(6)
  } yield kf2

  composed.metric(v, w)
}
// ------------------------------- EOF ------------------------------------
Example 156
Source File: RidgeRAdapter.scala From Scala-for-Machine-Learning-Second-Edition with MIT License | 5 votes |
package org.scalaml.libraries.commonsmath

import scala.language.implicitConversions

import org.apache.commons.math3.stat.regression.AbstractMultipleLinearRegression
import org.apache.commons.math3.linear.{LUDecomposition, QRDecomposition, RealMatrix, RealVector}
import org.apache.commons.math3.stat.StatUtils
import org.apache.commons.math3.stat.descriptive.moment.SecondMoment
import org.scalaml.Predef._
import org.scalaml.libraries.commonsmath.CommonsMath._

// The class declaration and its public members were truncated in this
// listing; it is restored minimally here (constructor and other overrides
// omitted), since the private helpers below rely on methods inherited from
// AbstractMultipleLinearRegression (getY, isNoIntercept, calculateResiduals).
private[scalaml] class RidgeRAdapter extends AbstractMultipleLinearRegression {

  private def calculateTotalSumOfSquares: Double =
    if (isNoIntercept) StatUtils.sumSq(getY.toArray)
    else (new SecondMoment).evaluate(getY.toArray)

  // Retrieve the residual values from AbstractMultipleLinearRegression,
  // then compute the residual sum of squares with a map and sum.
  private def rss: Double = {
    val x: Array[Double] = calculateResiduals
    x.aggregate(0.0)((s, _x) => s + sqr(_x), _ + _)
  }
}
// --------------------------- EOF -----------------------------
Example 157
Source File: resolutionRulesImplicitScope.scala From Learn-Scala-Programming with MIT License | 5 votes |
package ch04

import scala.language.implicitConversions

trait ParentA { def name: String }
trait ParentB

class ChildA(val name: String) extends ParentA with ParentB

object ParentB {
  implicit def a2Char(a: ParentA): Char = a.name.head
}

object ParentA {
  implicit def a2Int(a: ParentA): Int = a.hashCode()

  implicit val ordering = new Ordering[ChildA] {
    override def compare(a: ChildA, b: ChildA): Int =
      implicitly[Ordering[String]].compare(a.name, b.name)
  }
}

object ChildA {
  implicit def a2String(a: ParentA): String = a.name
}

trait Test {
  def test(a: ChildA) = {
    val _: Int = a // companion object of ParentA
    // val _: String = a // companion object of ChildA
    // val _: Char = a // companion object of ParentB
  }

  def constructor[T: Ordering](in: T*): List[T] = in.toList.sorted // companion object of type constructor

  constructor(new ChildA("A"), new ChildA("B")).sorted // companion object of type parameters
}
Example 158
Source File: Boat.scala From Learn-Scala-Programming with MIT License | 5 votes |
package ch09

final case class Boat(direction: Double, position: (Double, Double)) {
  def go(speed: Float, time: Float): Boat = {
    val distance = speed * time
    val (x, y) = position
    val nx = x + distance * Math.cos(direction)
    val ny = y + distance * Math.sin(direction)
    copy(direction, (nx, ny))
  }

  def turn(angle: Double): Boat =
    copy(direction = (this.direction + angle) % (2 * Math.PI))
}

import scala.language.{higherKinds, implicitConversions}

object Boat {
  val boat = Boat(0, (0d, 0d))

  import Monad.lowPriorityImplicits._

  def go[M[_]: Monad]: (Float, Float) => Boat => M[Boat] =
    (speed, time) => boat => Monad[M].unit(boat.go(speed, time))

  def turn[M[_]: Monad]: Double => Boat => M[Boat] =
    angle => boat => Monad[M].unit(boat.turn(angle))

  def move[A, M[_]: Monad](go: (Float, Float) => A => M[A],
                           turn: Double => A => M[A])(boat: M[A]): M[A] = for {
    a <- boat
    b <- go(10, 5)(a)
    c <- turn(0.5)(b)
    d <- go(20, 20)(c)
    e <- turn(-0.1)(d)
    f <- go(1, 1)(e)
  } yield f
}
Example 159
Source File: MyLogger.scala From Squerall with Apache License 2.0 | 5 votes |
package org.squerall

import org.slf4j.{Logger, LoggerFactory}

import scala.language.implicitConversions

trait MyLogger {
  @transient private var log_ : Logger = _

  // Get or create the logger for this object.
  protected def log: Logger = {
    if (log_ == null) {
      log_ = LoggerFactory.getLogger(logName)
    }
    log_
  }

  // Logger name for this object; trailing $'s of Scala objects are ignored.
  protected def logName: String = {
    this.getClass.getName.stripSuffix("$")
  }

  def trace(msg: => String): Unit = { if (log.isTraceEnabled) log.trace(msg) }
  def trace(msg: => String, e: Throwable): Unit = { if (log.isTraceEnabled) log.trace(msg, e) }
  def trace(msg: => String, o: Any, os: Any*): Unit = { if (log.isTraceEnabled) log.trace(msg, o, os) }

  def debug(msg: => String): Unit = { if (log.isDebugEnabled) log.debug(msg) }
  def debug(msg: => String, e: Throwable): Unit = { if (log.isDebugEnabled) log.debug(msg, e) }
  def debug(msg: => String, o: Any, os: Any*): Unit = { if (log.isDebugEnabled) log.debug(msg, o, os) }

  def info(msg: => String): Unit = { if (log.isInfoEnabled) log.info(msg) }
  def info(msg: => String, e: Throwable): Unit = { if (log.isInfoEnabled) log.info(msg, e) }
  def info(msg: => String, o: Any, os: Any*): Unit = { if (log.isInfoEnabled) log.info(msg, o, os) }

  def warn(msg: => String): Unit = { if (log.isWarnEnabled) log.warn(msg) }
  def warn(msg: => String, e: Throwable): Unit = { if (log.isWarnEnabled) log.warn(msg, e) }
  def warn(msg: => String, o: Any, os: Any*): Unit = { if (log.isWarnEnabled) log.warn(msg, o, os) }

  def error(msg: => String): Unit = { if (log.isErrorEnabled) log.error(msg) }
  def error(msg: => String, e: Throwable): Unit = { if (log.isErrorEnabled) log.error(msg, e) }
  def error(msg: => String, o: Any, os: Any*): Unit = { if (log.isErrorEnabled) log.error(msg, o, os) }

  def mark(msg: => String): Unit = { if (log.isErrorEnabled) log.error(msg) }
  def mark(msg: => String, e: Throwable): Unit = { if (log.isErrorEnabled) log.error(msg, e) }
  def mark(msg: => String, o: Any, os: Any*): Unit = { if (log.isErrorEnabled) log.error(msg, o, os) }
}

private object MyLogger {
  implicit def logging2Logger(anything: MyLogger): Logger = anything.log
}
Example 160
Source File: Fractional.scala From perf_tester with Apache License 2.0 | 5 votes |
package scala
package math

import scala.language.implicitConversions

trait Fractional[T] extends Numeric[T] {
  def div(x: T, y: T): T

  class FractionalOps(lhs: T) extends Ops(lhs) {
    def /(rhs: T) = div(lhs, rhs)
  }

  override implicit def mkNumericOps(lhs: T): FractionalOps = new FractionalOps(lhs)
}

object Fractional {
  @inline def apply[T](implicit frac: Fractional[T]): Fractional[T] = frac

  trait ExtraImplicits {
    implicit def infixFractionalOps[T](x: T)(implicit num: Fractional[T]): Fractional[T]#FractionalOps =
      new num.FractionalOps(x)
  }

  object Implicits extends ExtraImplicits
}
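A small usage sketch mirroring the standard library: importing Fractional.Implicits enables the / operator on any T with a Fractional instance (the mean helper is an assumption for illustration):

import scala.math.Fractional.Implicits._

def mean[T](xs: Seq[T])(implicit num: Fractional[T]): T =
  xs.reduce(num.plus) / num.fromInt(xs.size)

mean(Seq(1.0, 2.0, 4.5)) // 2.5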
Example 161
Source File: SystemProperties.scala From perf_tester with Apache License 2.0 | 5 votes |
package scala
package sys

import scala.collection.{mutable, Iterator}
import scala.collection.JavaConverters._
import java.security.AccessControlException
import scala.language.implicitConversions

// The SystemProperties class itself (a mutable.Map view over
// java.lang.System properties, which the imports above serve) was truncated
// in this listing; only its companion object survives.
object SystemProperties {
  def exclusively[T](body: => T) = this synchronized body

  implicit def systemPropertiesToCompanion(p: SystemProperties): SystemProperties.type = this

  private final val HeadlessKey            = "java.awt.headless"
  private final val PreferIPv4StackKey     = "java.net.preferIPv4Stack"
  private final val PreferIPv6AddressesKey = "java.net.preferIPv6Addresses"
  private final val NoTraceSuppressionKey  = "scala.control.noTraceSuppression"

  def help(key: String): String = key match {
    case HeadlessKey            => "system should not utilize a display device"
    case PreferIPv4StackKey     => "system should prefer IPv4 sockets"
    case PreferIPv6AddressesKey => "system should prefer IPv6 addresses"
    case NoTraceSuppressionKey  => "scala should not suppress any stack trace creation"
    case _                      => ""
  }

  lazy val headless: BooleanProp            = BooleanProp.keyExists(HeadlessKey)
  lazy val preferIPv4Stack: BooleanProp     = BooleanProp.keyExists(PreferIPv4StackKey)
  lazy val preferIPv6Addresses: BooleanProp = BooleanProp.keyExists(PreferIPv6AddressesKey)
  lazy val noTraceSuppression: BooleanProp  = BooleanProp.valueIsTrue(NoTraceSuppressionKey)
}
Example 162
Source File: package.scala From perf_tester with Apache License 2.0 | 5 votes |
package scala.concurrent

import scala.language.implicitConversions

package object duration {
  implicit final class IntMult(private val i: Int) extends AnyVal {
    def *(d: Duration) = d * i.toDouble
    def *(d: FiniteDuration) = d * i.toLong
  }

  implicit final class LongMult(private val i: Long) extends AnyVal {
    def *(d: Duration) = d * i.toDouble
    def *(d: FiniteDuration) = d * i.toLong
  }

  implicit final class DoubleMult(private val f: Double) extends AnyVal {
    def *(d: Duration) = d * f.toDouble
  }
}
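Usage sketch (this matches the behavior of the standard scala.concurrent.duration package):

import scala.concurrent.duration._

val d1: FiniteDuration = 5 * 1.second    // via IntMult
val d2: FiniteDuration = 5L * 1.minute   // via LongMult
val d3: Duration       = 1.5 * 2.seconds // via DoubleMult, yields a Duration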
Example 163
Source File: Searching.scala From perf_tester with Apache License 2.0 | 5 votes |
package scala.collection

import scala.language.implicitConversions
import scala.math.Ordering
import scala.collection.generic.IsSeqLike

object Searching {
  sealed abstract class SearchResult {
    def insertionPoint: Int
  }

  case class Found(foundIndex: Int) extends SearchResult {
    override def insertionPoint = foundIndex
  }

  case class InsertionPoint(insertionPoint: Int) extends SearchResult

  @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0")
  class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal

  @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0")
  implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeqLike[Repr]): SearchImpl[Repr, fr.A] =
    new SearchImpl(fr.conversion(coll))
}
Example 164
Source File: HttpTest.scala From scalajs-angulate with MIT License | 5 votes |
// - Project: scalajs-angulate (https://github.com/jokade/scalajs-angulate)
// Description:
//
// Distributed under the MIT License (see included file LICENSE)
package test

import biz.enef.angulate._
import biz.enef.angulate.core.HttpService
import utest._

import scala.scalajs.js
import scala.util.{Failure, Success}
import js.Dynamic.literal
import scala.language.implicitConversions
import AnnotatedFunction._
import Module._

object HttpTest extends AngulateTestSuite {
  override val tests = TestSuite {
    implicit val module = angular.createModule("test", Seq("ngMockE2E"))

    module.run(($httpBackend: js.Dynamic) => {
      $httpBackend.whenGET("/ok").respond(literal(id = 200))
      $httpBackend.whenGET("/error").respond(404, "resource not found")
      $httpBackend.whenPOST("/empty").respond(204)
    })

    val $http = dependency[HttpService]("$http")
    var ok = false

    'onComplete - {
      'success - {
        val p = promise()
        $http.get[Data]("/ok").onComplete {
          case Success(data) => p.assert(data.id == 200)
          case x => p.fail()
        }
        p.future
      }

      'failure - {
        val p = promise()
        $http.get[Data]("/error").onComplete {
          case Success(_) => p.fail()
          case Failure(ex) => p.ok()
        }
        p.future
      }

      'expectEmptyResponse - {
        val p = promise()
        $http.post[Unit]("/empty").onComplete {
          case Success(x) => p.ok()
          case _ => p.fail()
        }
        p.future
      }
    }

    'onSuccess - {
      'success - {
        val p = promise()
        $http.get[Data]("/ok").onSuccess { data =>
          p.assert(data.id == 200)
        }
        p.future
      }

      'expectEmptyResponse - {
        val p = promise()
        $http.post[Unit]("/empty").onSuccess(x => p.ok())
        p.future
      }
    }
  }

  trait Data extends js.Object {
    def id: Int = js.native
  }
}
Example 165
Source File: Resource.scala From hyperion with Apache License 2.0 | 5 votes |
package com.krux.hyperion.resource

import scala.language.implicitConversions

sealed trait Resource[+T] {
  def asWorkerGroup: Option[WorkerGroup]
  def asManagedResource: Option[T]
  def toSeq: Seq[T]
}

sealed class WorkerGroupResource[T](wg: WorkerGroup) extends Resource[T] {
  def asWorkerGroup: Option[WorkerGroup] = Option(wg)
  def asManagedResource: Option[T] = None
  def toSeq: Seq[T] = Seq.empty
}

sealed class ManagedResource[T](resource: T) extends Resource[T] {
  def asWorkerGroup: Option[WorkerGroup] = None
  def asManagedResource: Option[T] = Option(resource)
  def toSeq: Seq[T] = Seq(resource)
}

object Resource {
  def apply[T](wg: WorkerGroup): Resource[T] = new WorkerGroupResource(wg)
  def apply[T](resource: T): Resource[T] = new ManagedResource(resource)

  implicit def workerGroupToWorkerGroupResource[T](workerGroup: WorkerGroup): Resource[T] =
    Resource(workerGroup)

  implicit def resourceToWorkerGroupResource[T](resource: T): Resource[T] =
    Resource(resource)
}
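Usage sketch: the implicit lifts let a plain value or a WorkerGroup be passed wherever a Resource[T] is expected (FakeCluster is a hypothetical stand-in type):

import com.krux.hyperion.resource.Resource

case class FakeCluster(name: String)

def managed[T](resource: Resource[T]): Seq[T] = resource.toSeq

managed[FakeCluster](FakeCluster("emr")) // lifted via resourceToWorkerGroupResource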
Example 166
Source File: package.scala From hyperion with Apache License 2.0 | 5 votes |
package com.krux.hyperion

import scala.language.implicitConversions

import java.time.ZonedDateTime

import com.krux.hyperion.common.S3Uri
import com.krux.hyperion.expression.Duration

package object adt {

  // somehow the following does not work in all situations:
  // implicit def seqNative2SeqHType[B <: HType, A <% B](x: Seq[A]): Seq[B] = x.map(xx => xx: B)
  // since the generic one does not work we have to write all supported ones explicitly

  implicit def seqString2SeqHString(x: Seq[String]): Seq[HString] = x.map(xx => xx: HString)
  implicit def seqInt2SeqHInt(x: Seq[Int]): Seq[HInt] = x.map(xx => xx: HInt)
  implicit def seqDouble2SeqHDouble(x: Seq[Double]): Seq[HDouble] = x.map(xx => xx: HDouble)
  implicit def seqBoolean2SeqHBoolean(x: Seq[Boolean]): Seq[HBoolean] = x.map(xx => xx: HBoolean)
  implicit def seqDateTime2SeqHDateTime(x: Seq[ZonedDateTime]): Seq[HDateTime] = x.map(xx => xx: HDateTime)
  implicit def seqDuration2SeqHDuration(x: Seq[Duration]): Seq[HDuration] = x.map(xx => xx: HDuration)
  implicit def seqS3Uri2SeqHS3Uri(x: Seq[S3Uri]): Seq[HS3Uri] = x.map(xx => xx: HS3Uri)
  implicit def seqLong2SeqHLong(x: Seq[Long]): Seq[HLong] = x.map(xx => xx: HLong)
}
Example 167
Source File: S3DataNode.scala From hyperion with Apache License 2.0 | 5 votes |
package com.krux.hyperion.datanode

import scala.language.implicitConversions

import com.krux.hyperion.adt.{HBoolean, HS3Uri}
import com.krux.hyperion.aws.AdpS3DataNode
import com.krux.hyperion.common.{BaseFields, PipelineObject, PipelineObjectId, S3Uri}
import com.krux.hyperion.dataformat.DataFormat

sealed trait S3DataNode extends Copyable {
  type Self <: S3DataNode

  def s3DataNodeFields: S3DataNodeFields
  def updateS3DataNodeFields(fields: S3DataNodeFields): Self

  def dataFormat = s3DataNodeFields.dataFormat
  def withDataFormat(fmt: DataFormat): Self = updateS3DataNodeFields(
    s3DataNodeFields.copy(dataFormat = Option(fmt))
  )

  def asInput(): String = asInput(1)
  def asInput(n: Integer): String = "${" + s"INPUT${n}_STAGING_DIR}"

  def asOutput(): String = asOutput(1)
  def asOutput(n: Integer): String = "${" + s"OUTPUT${n}_STAGING_DIR}"

  def manifestFilePath = s3DataNodeFields.manifestFilePath
  def withManifestFilePath(path: HS3Uri): Self = updateS3DataNodeFields(
    s3DataNodeFields.copy(manifestFilePath = Option(path))
  )

  def isCompressed = s3DataNodeFields.isCompressed
  def compressed: Self = updateS3DataNodeFields(
    s3DataNodeFields.copy(isCompressed = HBoolean.True)
  )

  def isEncrypted = s3DataNodeFields.isEncrypted
  def unencrypted: Self = updateS3DataNodeFields(
    s3DataNodeFields.copy(isEncrypted = HBoolean.False)
  )

  override def objects: Iterable[PipelineObject] = dataFormat ++ super.objects
}

object S3DataNode {
  def apply(s3Path: S3Uri): S3DataNode =
    if (s3Path.ref.endsWith("/")) S3Folder(s3Path)
    else S3File(s3Path)

  implicit def string2S3DataNode(s3path: String): S3DataNode = S3DataNode(S3Uri(s3path))
}

case class S3Folder private (
  baseFields: BaseFields,
  dataNodeFields: DataNodeFields,
  s3DataNodeFields: S3DataNodeFields,
  directoryPath: HS3Uri
) extends S3DataNode {

  type Self = S3Folder

  def updateBaseFields(fields: BaseFields) = copy(baseFields = fields)
  def updateDataNodeFields(fields: DataNodeFields) = copy(dataNodeFields = fields)
  def updateS3DataNodeFields(fields: S3DataNodeFields) = copy(s3DataNodeFields = fields)

  override def toString = directoryPath.toString

  lazy val serialize = AdpS3DataNode(
    id = id,
    name = name,
    directoryPath = Option(directoryPath.serialize),
    filePath = None,
    dataFormat = dataFormat.map(_.ref),
    manifestFilePath = manifestFilePath.map(_.serialize),
    compression = if (isCompressed) Option("gzip") else None,
    s3EncryptionType = if (isEncrypted) None else Option("NONE"),
    precondition = seqToOption(preconditions)(_.ref),
    onSuccess = seqToOption(onSuccessAlarms)(_.ref),
    onFail = seqToOption(onFailAlarms)(_.ref)
  )
}

object S3Folder {
  def apply(directoryPath: HS3Uri): S3Folder =
    new S3Folder(
      baseFields = BaseFields(PipelineObjectId(S3Folder.getClass)),
      dataNodeFields = DataNodeFields(),
      s3DataNodeFields = S3DataNodeFields(),
      directoryPath = directoryPath
    )
}
Example 168
Source File: Script.scala From hyperion with Apache License 2.0 | 5 votes |
package com.krux.hyperion.activity

import scala.language.implicitConversions

import com.krux.hyperion.adt.{HS3Uri, HString}
import com.krux.hyperion.common.S3Uri

sealed trait Script {
  def uri: Option[HS3Uri]
  def content: Option[HString]
}

sealed case class ScriptUri(uri: Option[HS3Uri]) extends Script {
  def content: Option[HString] = None
}

sealed case class ScriptContent(content: Option[HString]) extends Script {
  def uri: Option[HS3Uri] = None
}

object Script {
  def apply(uri: HS3Uri): Script = ScriptUri(Option(uri))
  def apply(content: HString): Script = ScriptContent(Option(content))

  implicit def s3Uri2Script(uri: S3Uri): Script = Script(uri)
  implicit def string2Script(content: String): Script = Script(content)
}
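Usage sketch: strings and S3 URIs lift into Script through the companion implicits (the bucket path is illustrative):

import com.krux.hyperion.activity.Script
import com.krux.hyperion.common.S3Uri

val inline: Script = "echo hello"                // via string2Script
val remote: Script = S3Uri("s3://bucket/run.sh") // via s3Uri2Script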
Example 169
Source File: MainClass.scala From hyperion with Apache License 2.0 | 5 votes |
package com.krux.hyperion.activity

import scala.language.implicitConversions

import com.krux.hyperion.adt.HString

class MainClass private (name: String) {
  override def toString = name

  val fullName: String = name
  val simpleName: String = name.split('.').last
}

object MainClass {
  implicit def hStringToMainClass(s: HString): MainClass = MainClass(s.toString)
  implicit def stringToMainClass(s: String): MainClass = MainClass(s)
  implicit def classToMainClass(c: Class[_]): MainClass = MainClass(c)
  implicit def anyToMainClass(a: Any): MainClass = MainClass(a)

  def apply(mainClass: Any): MainClass = mainClass match {
    case s: String   => new MainClass(s.stripSuffix("$"))
    case c: Class[_] => apply(c.getCanonicalName)
    case mc          => apply(mc.getClass)
  }
}
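Usage sketch for the companion implicits; the class name is illustrative:

import com.krux.hyperion.activity.MainClass

val a: MainClass = "com.example.WordCount$"  // via stringToMainClass; "$" is stripped
val b: MainClass = classOf[java.lang.String] // via classToMainClass

a.fullName   // "com.example.WordCount"
a.simpleName // "WordCount"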
Example 170
Source File: RabbitMQDistributedInput.scala From sparta with Apache License 2.0 | 5 votes |
package com.stratio.sparta.plugin.input.rabbitmq

import java.io.{Serializable => JSerializable}

import com.stratio.sparta.plugin.input.rabbitmq.handler.MessageHandler
import com.stratio.sparta.sdk.pipeline.input.Input
import com.stratio.sparta.sdk.properties.ValidatingPropertyMap._
import org.apache.spark.sql.Row
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.rabbitmq.RabbitMQUtils._
import org.apache.spark.streaming.rabbitmq.distributed.RabbitMQDistributedKey
import org.apache.spark.streaming.rabbitmq.models.ExchangeAndRouting

import scala.language.implicitConversions
import scala.util.Try

object RabbitMQDistributedInput {
  // Keys from UI
  val DistributedPropertyKey = "distributedProperties"
  val QueuePropertyKey = "distributedQueue"
  val ExchangeNamePropertyKey = "distributedExchangeName"
  val ExchangeTypePropertyKey = "distributedExchangeType"
  val RoutingKeysPropertyKey = "distributedRoutingKeys"
  val HostPropertyKey = "hosts"

  // Default values
  val QueueDefaultValue = "queue"
  val HostDefaultValue = "localhost"
}

class RabbitMQDistributedInput(properties: Map[String, JSerializable])
  extends Input(properties) with RabbitMQGenericProps {

  import RabbitMQDistributedInput._

  def initStream(ssc: StreamingContext, sparkStorageLevel: String): DStream[Row] = {
    val messageHandler = MessageHandler(properties).handler
    val params = propsWithStorageLevel(sparkStorageLevel)
    createDistributedStream(ssc, getKeys(params), params, messageHandler)
  }

  def getKeys(rabbitMQParams: Map[String, String]): Seq[RabbitMQDistributedKey] = {
    val items = Try(properties.getMapFromJsoneyString(DistributedPropertyKey))
      .getOrElse(Seq.empty[Map[String, String]])
    for (item <- items) yield getKey(item, rabbitMQParams)
  }

  def getKey(params: Map[String, String], rabbitMQParams: Map[String, String]): RabbitMQDistributedKey = {
    val exchangeAndRouting = ExchangeAndRouting(
      params.get(ExchangeNamePropertyKey).notBlank,
      params.get(ExchangeTypePropertyKey).notBlank,
      params.get(RoutingKeysPropertyKey).notBlank
    )

    val hosts = HostPropertyKey -> params.get(HostPropertyKey).notBlankWithDefault(HostDefaultValue)
    val queueName = params.get(QueuePropertyKey).notBlankWithDefault(QueueDefaultValue)

    RabbitMQDistributedKey(
      queueName,
      exchangeAndRouting,
      rabbitMQParams + hosts
    )
  }
}
Example 171
Source File: ByteArrays.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.util

import java.security.MessageDigest

import okio.ByteString
import org.apache.commons.codec.binary.Base64

import scala.annotation.tailrec
import scala.language.implicitConversions

object ByteArrays {

  object Implicits {
    implicit def byteArrayToByteString(arr: Array[Byte]): ByteString = new ByteString(arr)
    implicit def byteStringToByteArray(byteString: ByteString): Array[Byte] = byteString.toByteArray
  }

  def paddedByteArray(bs: Array[Byte], length: Int): Array[Byte] = {
    val padded = Array.ofDim[Byte](math.max(length, bs.length))
    System.arraycopy(bs, 0, padded, 0, bs.length)
    padded
  }

  def paddedByteArray(s: String, length: Int): Array[Byte] =
    paddedByteArray(s.getBytes("US-ASCII"), length)

  def paddedByteArrayToString(bs: Array[Byte]): String =
    new String(bs, "US-ASCII").split("\u0000")(0)

  def trimmedByteArray(bs: Array[Byte]): Seq[Byte] = trimmedByteArray(bs.toIndexedSeq)
  def trimmedByteArray(bs: Seq[Byte]): Seq[Byte] = bs.reverse.dropWhile(_ == 0).reverse

  def sha256(bs: Array[Byte]): Array[Byte] = sha256(bs.toIndexedSeq)
  def sha256(bs: Seq[Byte]): Array[Byte] = {
    val md = MessageDigest.getInstance("SHA-256")
    md.update(bs.toArray)
    md.digest
  }

  def base64(bs: Seq[Byte]): String = base64(bs.toArray)
  def base64(bs: Array[Byte]): String = Base64.encodeBase64String(bs)
  def base64(s: String): Array[Byte] = Base64.decodeBase64(s)

  def bytesToHex(bs: Array[Byte]): String = bytesToHex(bs.toIndexedSeq)
  def bytesToHex(bs: Seq[Byte]): String = bs.map("%02X".format(_)).mkString

  def hexToBytes(hex: String): Seq[Byte] =
    hex.toSeq.sliding(2, 2).map(_.unwrap).map(Integer.parseInt(_, 16).toByte).toIndexedSeq

  def checksum(bytes: Array[Byte]): Array[Byte] = {
    // This code calculates the CRC16-XModem checksum.
    // Ported from https://github.com/alexgorbatchev/node-crc, via https://github.com/stellar/java-stellar-sdk
    @tailrec
    def loop(bs: Seq[Byte], crc: Int): Int = {
      bs match {
        case h +: t =>
          var code = crc >>> 8 & 0xFF
          code ^= h & 0xFF
          code ^= code >>> 4
          var crc_ = crc << 8 & 0xFFFF
          crc_ ^= code
          code = code << 5 & 0xFFFF
          crc_ ^= code
          code = code << 7 & 0xFFFF
          crc_ ^= code
          loop(t, crc_)
        case Nil => crc
      }
    }

    val crc = loop(bytes.toIndexedSeq, 0x0000)
    Array(crc.toByte, (crc >>> 8).toByte)
  }
}
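A quick round-trip sketch for the hex helpers and the CRC16-XModem checksum above:

import stellar.sdk.util.ByteArrays._

val bytes = hexToBytes("DEADBEEF")
bytesToHex(bytes)       // "DEADBEEF"
checksum(bytes.toArray) // two bytes, low byte first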
Example 172
Source File: HadoopConfig.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.streaming.examples.fsio

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import scala.language.implicitConversions

import org.apache.hadoop.conf.Configuration

import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.util.Constants._

class HadoopConfig(config: UserConfig) {
  def withHadoopConf(conf: Configuration): UserConfig = {
    config.withBytes(HADOOP_CONF, serializeHadoopConf(conf))
  }

  def hadoopConf: Configuration = deserializeHadoopConf(config.getBytes(HADOOP_CONF).get)

  private def serializeHadoopConf(conf: Configuration): Array[Byte] = {
    val out = new ByteArrayOutputStream()
    val dataOut = new DataOutputStream(out)
    conf.write(dataOut)
    dataOut.close()
    out.toByteArray
  }

  private def deserializeHadoopConf(bytes: Array[Byte]): Configuration = {
    val in = new ByteArrayInputStream(bytes)
    val dataIn = new DataInputStream(in)
    val result = new Configuration()
    result.readFields(dataIn)
    dataIn.close()
    result
  }
}

object HadoopConfig {
  def empty: HadoopConfig = new HadoopConfig(UserConfig.empty)
  def apply(config: UserConfig): HadoopConfig = new HadoopConfig(config)

  implicit def userConfigToHadoopConfig(userConf: UserConfig): HadoopConfig = {
    HadoopConfig(userConf)
  }
}
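Usage sketch: the implicit userConfigToHadoopConfig lets a UserConfig carry a serialized Hadoop Configuration:

import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.streaming.examples.fsio.HadoopConfig._
import org.apache.hadoop.conf.Configuration

val userConf = UserConfig.empty.withHadoopConf(new Configuration())
val restored: Configuration = userConf.hadoopConf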
Example 173
Source File: HBaseDSLSink.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.external.hbase.dsl

import scala.language.implicitConversions

import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.external.hbase.HBaseSink
import org.apache.gearpump.streaming.dsl.scalaapi.Stream

class HBaseDSLSink[T](stream: Stream[T]) {
  def writeToHbase(userConfig: UserConfig, table: String, parallelism: Int, description: String): Stream[T] = {
    stream.sink(HBaseSink[T](userConfig, table), parallelism, userConfig, description)
  }
}

object HBaseDSLSink {
  implicit def streamToHBaseDSLSink[T](stream: Stream[T]): HBaseDSLSink[T] = {
    new HBaseDSLSink[T](stream)
  }
}
Example 174
Source File: KafkaDSL.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.streaming.kafka.dsl

import java.util.Properties

import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.streaming.dsl.scalaapi.{Stream, StreamApp}
import org.apache.gearpump.streaming.kafka.{KafkaSink, KafkaSource}
import org.apache.gearpump.streaming.transaction.api.CheckpointStoreFactory

object KafkaDSL {
  // The listing dropped the enclosing implicit class, leaving `stream` and
  // `T` unresolved; it is restored here, mirroring the HBaseDSLSink pattern
  // above. The class name is an assumption.
  implicit class KafkaDSLSink[T](stream: Stream[T]) {
    def writeToKafka(
        topic: String,
        properties: Properties,
        parallelism: Int = 1,
        userConfig: UserConfig = UserConfig.empty,
        description: String = "KafkaSink"): Stream[T] = {
      stream.sink(new KafkaSink(topic, properties), parallelism, userConfig, description)
    }
  }
}
Example 175
Source File: StreamApp.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.streaming.dsl.scalaapi

import java.time.Instant

import akka.actor.ActorSystem
import org.apache.gearpump.Message
import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.cluster.client.ClientContext
import org.apache.gearpump.streaming.StreamApplication
import org.apache.gearpump.streaming.dsl.plan._
import org.apache.gearpump.streaming.source.{DataSource, Watermark}
import org.apache.gearpump.streaming.task.TaskContext
import org.apache.gearpump.util.Graph

import scala.language.implicitConversions

class CollectionDataSource[T](seq: Seq[T]) extends DataSource {
  private lazy val iterator: Iterator[T] = seq.iterator

  override def open(context: TaskContext, startTime: Instant): Unit = {}

  override def read(): Message = {
    if (iterator.hasNext) {
      Message(iterator.next(), Instant.now())
    } else {
      null
    }
  }

  override def close(): Unit = {}

  override def getWatermark: Instant = {
    if (iterator.hasNext) {
      Instant.now()
    } else {
      Watermark.MAX
    }
  }
}
Example 176
Source File: ClusterConfigSource.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.cluster

import java.io.File

import scala.language.implicitConversions

import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions}

// The trait and the object declaration were truncated in this listing; a
// minimal trait matching the private implementation below is restored.
trait ClusterConfigSource extends Serializable {
  def getConfig: Config
}

object ClusterConfigSource {

  def apply(filePath: String): ClusterConfigSource = {
    if (null == filePath) {
      new ClusterConfigSourceImpl(ConfigFactory.empty())
    } else {
      var config = ConfigFactory.parseFileAnySyntax(new File(filePath),
        ConfigParseOptions.defaults.setAllowMissing(true))

      if (null == config || config.isEmpty) {
        config = ConfigFactory.parseResourcesAnySyntax(filePath,
          ConfigParseOptions.defaults.setAllowMissing(true))
      }
      new ClusterConfigSourceImpl(config)
    }
  }

  implicit def FilePathToClusterConfigSource(filePath: String): ClusterConfigSource = {
    apply(filePath)
  }

  private class ClusterConfigSourceImpl(config: Config) extends ClusterConfigSource {
    override def getConfig: Config = config
  }
}
Example 177
Source File: TestProbeUtil.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump

import scala.language.implicitConversions

import akka.actor.{Actor, Props, Terminated}
import akka.testkit.TestProbe

object TestProbeUtil {
  implicit def toProps(probe: TestProbe): Props = {
    Props(new Actor {
      val probeRef = probe.ref
      context.watch(probeRef)

      def receive: Receive = {
        // Backticks make the pattern match the watched ref; a bare lowercase
        // name would bind a fresh variable and match any Terminated message.
        case Terminated(`probeRef`) => context.stop(self)
        case x => probeRef.forward(x)
      }
    })
  }
}
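Usage sketch in a TestKit spec: the conversion lets a TestProbe stand in wherever Props are expected:

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import org.apache.gearpump.TestProbeUtil._

val system = ActorSystem("test")
val probe = TestProbe()(system)
val proxy = system.actorOf(probe) // TestProbe -> Props via toProps
proxy ! "hello"
probe.expectMsg("hello")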
Example 178
Source File: ExecuteReply.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.content

// External libraries
import org.apache.toree.kernel.protocol.v5.{KernelMessageContent, Payloads, UserExpressions}
import play.api.libs.json._

// Internal libraries
import scala.language.implicitConversions

case class ExecuteReply(
  status: String,
  execution_count: Int,
  payload: Option[Payloads],
  user_expressions: Option[UserExpressions],
  ename: Option[String],
  evalue: Option[String],
  traceback: Option[List[String]]
) extends KernelMessageContent {
  override def content: String =
    Json.toJson(this)(ExecuteReply.executeReplyWrites).toString
}

object ExecuteReply extends TypeString {
  implicit val executeReplyReads = Json.reads[ExecuteReply]
  implicit val executeReplyWrites = Json.writes[ExecuteReply]

  implicit def ExecuteReplyToString(executeReply: ExecuteReply): String = {
    Json.toJson(executeReply).toString
  }

  override def toTypeString: String = "execute_reply"
}
Example 179
Source File: ErrorContent.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.content

import org.apache.toree.kernel.protocol.v5.KernelMessageContent
import play.api.libs.json.Json

import scala.language.implicitConversions

case class ErrorContent(
  ename: String,
  evalue: String,
  traceback: List[String]
) extends KernelMessageContent {
  override def content: String =
    Json.toJson(this)(ErrorContent.errorContentWrites).toString
}

object ErrorContent extends TypeString {
  implicit val errorContentReads = Json.reads[ErrorContent]
  implicit val errorContentWrites = Json.writes[ErrorContent]

  implicit def ErrorContentToString(errorContent: ErrorContent): String = {
    Json.toJson(errorContent).toString
  }

  override def toTypeString: String = "error"
}
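A round-trip sketch using the play-json formats above:

import org.apache.toree.kernel.protocol.v5.content.ErrorContent
import play.api.libs.json.Json

val err = ErrorContent("ValueError", "bad input", List("frame 1", "frame 2"))
val s: String = err // via the implicit ErrorContentToString
Json.parse(s).as[ErrorContent] == err // true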
Example 180
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client

import java.nio.charset.Charset

import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest
import org.apache.toree.utils.LogLike
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json, Reads}

import scala.concurrent.duration._

object Utilities extends LogLike {
  //
  // NOTE: This is brought in to remove feature warnings regarding the use of
  // implicit conversions regarding the following:
  //
  // 1. ByteStringToString
  // 2. ZMQMessageToKernelMessage
  //
  import scala.language.implicitConversions

  private val sessionId: UUID = java.util.UUID.randomUUID().toString

  implicit val timeout = Timeout(21474835.seconds) // Maximum delay

  implicit def ByteStringToString(byteString: ByteString): String = {
    new String(byteString.toArray, Charset.forName("UTF-8"))
  }

  implicit def StringToByteString(string: String): ByteString = {
    ByteString(string.getBytes)
  }

  implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = {
    val delimiterIndex: Int =
      message.frames.indexOf(ByteString("<IDS|MSG>".getBytes))
    // TODO Handle the case where there is no delimiter
    val ids: Seq[Array[Byte]] =
      message.frames.take(delimiterIndex).map(
        (byteString: ByteString) => byteString.toArray
      )
    val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header]
    val parentHeader = Json.parse(message.frames(delimiterIndex + 3)).validate[ParentHeader].fold[ParentHeader](
      // TODO: Investigate better solution than setting parentHeader to null for {}
      (invalid: Seq[(JsPath, Seq[ValidationError])]) => null, // HeaderBuilder.empty,
      (valid: ParentHeader) => valid
    )
    val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata]

    KMBuilder().withIds(ids.toList)
      .withSignature(message.frame(delimiterIndex + 1))
      .withHeader(header)
      .withParentHeader(parentHeader)
      .withMetadata(metadata)
      .withContentString(message.frame(delimiterIndex + 5)).build(false)
  }

  implicit def KernelMessageToZMQMessage(kernelMessage: KernelMessage): ZMQMessage = {
    val frames: scala.collection.mutable.ListBuffer[ByteString] =
      scala.collection.mutable.ListBuffer()
    kernelMessage.ids.map((id: Array[Byte]) => frames += ByteString.apply(id))
    frames += "<IDS|MSG>"
    frames += kernelMessage.signature
    frames += Json.toJson(kernelMessage.header).toString()
    frames += Json.toJson(kernelMessage.parentHeader).toString()
    frames += Json.toJson(kernelMessage.metadata).toString
    frames += kernelMessage.contentString
    ZMQMessage(frames: _*)
  }

  def parseAndHandle[T](json: String, reads: Reads[T], handler: T => Unit): Unit = {
    Json.parse(json).validate[T](reads).fold(
      (invalid: Seq[(JsPath, Seq[ValidationError])]) =>
        logger.error(s"Could not parse JSON, ${json}"),
      (content: T) => handler(content)
    )
  }

  def getSessionId = sessionId

  def toKernelMessage(message: ExecuteRequest): KernelMessage = {
    // construct a kernel message whose content is an ExecuteRequest
    val id = java.util.UUID.randomUUID().toString
    val header = Header(
      id, "spark", sessionId, MessageType.Incoming.ExecuteRequest.toString, "5.0")

    KMBuilder().withIds(Seq[Array[Byte]]()).withSignature("").withHeader(header)
      .withParentHeader(HeaderBuilder.empty).withContentString(message).build
  }
}
Example 181
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel

import java.nio.charset.Charset

import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.utils.LogLike
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json, Reads}

import scala.concurrent.duration._

object Utilities extends LogLike {
  //
  // NOTE: This is brought in to remove feature warnings regarding the use of
  // implicit conversions regarding the following:
  //
  // 1. ByteStringToString
  // 2. ZMQMessageToKernelMessage
  //
  import scala.language.implicitConversions

  implicit val timeout = Timeout(21474835.seconds)

  implicit def ByteStringToString(byteString: ByteString): String = {
    new String(byteString.toArray, Charset.forName("UTF-8"))
  }

  implicit def StringToByteString(string: String): ByteString = {
    ByteString(string.getBytes)
  }

  implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = {
    val delimiterIndex: Int =
      message.frames.indexOf(ByteString("<IDS|MSG>".getBytes))
    // TODO Handle the case where there is no delimiter
    val ids: Seq[Array[Byte]] =
      message.frames.take(delimiterIndex).map(
        (byteString: ByteString) => byteString.toArray
      )
    val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header]
    // TODO: Investigate better solution than setting parentHeader to null for {}
    val parentHeader = parseAndHandle(message.frames(delimiterIndex + 3),
      ParentHeader.headerReads,
      handler = (valid: ParentHeader) => valid,
      errHandler = _ => null
    )
    val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata]

    KMBuilder().withIds(ids.toList)
      .withSignature(message.frame(delimiterIndex + 1))
      .withHeader(header)
      .withParentHeader(parentHeader)
      .withMetadata(metadata)
      .withContentString(message.frame(delimiterIndex + 5)).build(false)
  }

  implicit def KernelMessageToZMQMessage(kernelMessage: KernelMessage): ZMQMessage = {
    val frames: scala.collection.mutable.ListBuffer[ByteString] =
      scala.collection.mutable.ListBuffer()
    kernelMessage.ids.map((id: Array[Byte]) => frames += ByteString.apply(id))
    frames += "<IDS|MSG>"
    frames += kernelMessage.signature
    frames += Json.toJson(kernelMessage.header).toString()
    frames += Json.toJson(kernelMessage.parentHeader).toString()
    frames += Json.toJson(kernelMessage.metadata).toString
    frames += kernelMessage.contentString
    ZMQMessage(frames: _*)
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U): U = {
    parseAndHandle(json, reads, handler,
      (invalid: Seq[(JsPath, Seq[ValidationError])]) => {
        logger.error(s"Could not parse JSON, ${json}")
        throw new Throwable(s"Could not parse JSON, ${json}")
      }
    )
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U,
                           errHandler: Seq[(JsPath, Seq[ValidationError])] => U): U = {
    Json.parse(json).validate[T](reads).fold(
      errHandler,
      (content: T) => handler(content)
    )
  }
}
Example 182
Source File: ArgumentParsingSupport.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.utils

import joptsimple.{OptionSpec, OptionParser}
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import java.io.{PrintStream, OutputStream}

trait ArgumentParsingSupport {
  protected lazy val parser = new OptionParser()
  private var options: joptsimple.OptionSet = _
  parser.allowsUnrecognizedOptions()

  def parseArgs(args: String, delimiter: String = " ") = {
    options = parser.parse(args.split(delimiter): _*)
    options.nonOptionArguments().asScala.map(_.toString)
  }

  def printHelp(outputStream: OutputStream, usage: String) = {
    val printStream = new PrintStream(outputStream)
    printStream.println(s"Usage: $usage\n")
    parser.printHelpOn(outputStream)
  }

  implicit def has[T](spec: OptionSpec[T]): Boolean = {
    require(options != null, "Arguments not parsed yet!")
    options.has(spec)
  }

  implicit def get[T](spec: OptionSpec[T]): Option[T] = {
    require(options != null, "Arguments not parsed yet!")
    Some(options.valueOf(spec)).filter(_ != null)
  }

  // NOTE: Cannot be implicit as conflicts with get
  def getAll[T](spec: OptionSpec[T]): Option[List[T]] = {
    require(options != null, "Arguments not parsed yet!")
    Some(options.valuesOf(spec).asScala.toList).filter(_ != null)
  }
}
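A minimal sketch of the implicit has and get conversions at work; the MyCli object and its option are hypothetical:

object MyCli extends ArgumentParsingSupport {
  private val _name = parser.accepts("name").withRequiredArg().ofType(classOf[String])

  def run(args: String): Unit = {
    parseArgs(args)
    // `has` converts the spec to Boolean, `get` converts it to Option[String]
    if (_name) println(s"Hello, ${(_name: Option[String]).getOrElse("world")}")
  }
}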
Example 183
Source File: StreamingMLUtils.scala From spark-structured-streaming-ml with Apache License 2.0 | 5 votes |
package org.apache.spark.mllib

import scala.language.implicitConversions

import org.apache.spark.ml.linalg.{SparseVector, DenseVector, Vector}
import org.apache.spark.mllib.linalg.{Vector => OldVector, Vectors => OldVectors}
import org.apache.spark.mllib.util.MLUtils

object StreamingMLUtils {
  implicit def mlToMllibVector(v: Vector): OldVector = v match {
    case dv: DenseVector => OldVectors.dense(dv.toArray)
    case sv: SparseVector => OldVectors.sparse(sv.size, sv.indices, sv.values)
    case _ => throw new IllegalArgumentException
  }

  def fastSquaredDistance(x: Vector, xNorm: Double, y: Vector, yNorm: Double) = {
    MLUtils.fastSquaredDistance(x, xNorm, y, yNorm)
  }
}
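A minimal usage sketch: callers pass the new ml vectors, and the implicit mlToMllibVector silently bridges them to the old mllib API inside fastSquaredDistance:

import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.mllib.StreamingMLUtils

val x = Vectors.dense(1.0, 2.0)
val y = Vectors.sparse(2, Array(0), Array(3.0))
val d = StreamingMLUtils.fastSquaredDistance(x, Vectors.norm(x, 2.0), y, Vectors.norm(y, 2.0))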
Example 184
Source File: Target.scala From protoc-bridge with Apache License 2.0 | 5 votes |
package protocbridge

import java.io.File

case class Target(
    generator: Generator,
    outputPath: File,
    options: Seq[String] = Seq.empty
)

object Target {
  import scala.language.implicitConversions

  def builtin(name: String, options: Seq[String] = Seq.empty) =
    (BuiltinGenerator(name), options)

  def apply(
      generatorAndOpts: (Generator, Seq[String]),
      outputPath: File
  ): Target = {
    apply(generatorAndOpts._1, outputPath, generatorAndOpts._2)
  }

  implicit def generatorOptsFileTupleToTarget(
      s: ((Generator, Seq[String]), File)
  ): Target = Target(s._1, s._2)

  implicit def generatorFileTupleToTarget(s: (Generator, File)): Target =
    Target(s._1, s._2)

  implicit def protocCodeGeneratorFile(s: (ProtocCodeGenerator, File)): Target =
    Target(s._1, s._2)

  implicit def protocCodeGeneratorOptsFile(
      s: ((ProtocCodeGenerator, Seq[String]), File)
  ): Target = Target(ProtocCodeGenerator.toGenerator(s._1._1), s._2, s._1._2)
}
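A minimal sketch of the tuple-to-Target conversions in a build definition (the output directory is hypothetical); because the implicits live in Target's companion, no extra import is needed at the use site:

import java.io.File
import protocbridge.Target

// ((Generator, Seq[String]), File) is adapted to Target by the companion implicits.
val targets: Seq[Target] = Seq(
  Target.builtin("java") -> new File("target/generated-java")
)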
Example 185
Source File: GetAddresses.scala From bitcoin-s with MIT License | 5 votes |
package org.bitcoins.wallet.util

import org.bitcoins.core.hd._
import play.api.libs.json._

import scala.sys.process._

object GetAddresses extends App {
  // NOTE: the object header, JSON implicits, printerr helper and the opening of
  // the for comprehension were elided in extraction; they are reconstructed here
  // from the identifiers used in the body below.
  import scala.language.implicitConversions

  implicit def int2json(int: Int): JsValue = JsNumber(int)
  implicit def string2json(str: String): JsValue = JsString(str)
  implicit def seq2json(xs: Seq[JsValue]): JsValue = JsArray(xs)

  def printerr(x: Any): Unit = System.err.println(x.toString)

  val accountInfo = for {
    coin <- List(HDCoinType.Bitcoin, HDCoinType.Testnet)
    constant <- HDPurposes.all
    accountIndex <- 0 until 3
  } yield {
    val accountPath = BIP32Path(
      BIP32Node(constant.constant, hardened = true),
      BIP32Node(coin.toInt, hardened = true),
      BIP32Node(accountIndex, hardened = true)
    )

    val pathType = constant match {
      case HDPurposes.Legacy       => "legacy"
      case HDPurposes.NestedSegWit => "p2sh-segwit"
      case HDPurposes.SegWit       => "segwit"
      case other => throw new RuntimeException(s"Unexpected purpose $other")
    }

    val trezorPathType = constant match {
      case HDPurposes.Legacy       => "address"
      case HDPurposes.NestedSegWit => "p2shsegwit"
      case HDPurposes.SegWit       => "segwit"
      case other => throw new RuntimeException(s"Unexpected purpose $other")
    }

    val xpubCmd = s"""trezorctl get-public-node -n $accountPath -t $trezorPathType"""
    printerr(s"Executing cmd: $xpubCmd")
    val xpub = xpubCmd.!!.split("\n").last.split(": ").last

    val addresses = for {
      chainType <- List[HDChainType](HDChainType.Change, HDChainType.External)
      addressIndex <- 0 until 3
    } yield {
      val path = BIP32Path(
        BIP32Node(constant.constant, hardened = true),
        BIP32Node(coin.toInt, hardened = true),
        BIP32Node(accountIndex, hardened = true),
        BIP32Node(chainType.index, hardened = false),
        BIP32Node(addressIndex, hardened = false)
      )

      val addressCmd = s"trezorctl get-address -n $path -t $trezorPathType"
      printerr(s"Executing cmd: $addressCmd")
      val address = addressCmd.!!.split("\n").head

      val json = Json.toJson(
        Map[String, JsValue](
          "path" -> path.toString,
          "chain" -> chainType.toString,
          "addressIndex" -> addressIndex,
          "address" -> address
        )
      )
      json
    }

    val json = JsObject(
      Map[String, JsValue](
        "coin" -> coin.toString,
        "pathType" -> pathType,
        "account" -> accountIndex,
        "xpub" -> xpub,
        "addresses" -> addresses
      )
    )
    json
  }

  println(Json.stringify(JsArray(accountInfo)))
}
Example 186
Source File: DbManagement.scala From bitcoin-s with MIT License | 5 votes |
package org.bitcoins.db

import org.bitcoins.core.util.{BitcoinSLogger, FutureUtil}
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.FlywayException

import scala.concurrent.{ExecutionContext, Future}

trait DbManagement extends BitcoinSLogger { _: JdbcProfileComponent[AppConfig] =>
  import profile.api._

  import scala.language.implicitConversions

  def migrate(): Int = {
    val module = appConfig.moduleName
    val config = Flyway
      .configure()
      .locations(s"classpath:${driverName}/${module}/migration/")
    val flyway = config.dataSource(jdbcUrl, username, password).load

    try {
      flyway.migrate()
    } catch {
      case err: FlywayException =>
        logger.warn(
          s"Failed to apply first round of migrations, attempting baseline and re-apply",
          err)
        //maybe we have an existing database, so attempt to baseline the existing
        //database and then apply migrations again
        flyway.baseline()
        flyway.migrate()
    }
  }
}
Example 187
Source File: TestUtils.scala From spark-monitoring with MIT License | 5 votes |
package org.apache.spark.metrics

import org.mockito.ArgumentMatcher

import scala.reflect.ClassTag

object TestImplicits {
  import scala.language.implicitConversions

  implicit def matcher[T](f: (T) => Boolean): ArgumentMatcher[T] =
    new ArgumentMatcher[T]() {
      def matches(o: Any): Boolean = f(o.asInstanceOf[T])
    }
}

object TestUtils {
  def getField[T: ClassTag](fieldName: String): java.lang.reflect.Field = {
    val field = scala.reflect.classTag[T].runtimeClass.getDeclaredField(fieldName)
    field.setAccessible(true)
    field
  }
}
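A minimal sketch of the function-to-matcher conversion; the resulting value can then be handed to whatever Mockito call expects an ArgumentMatcher (for example argThat):

import org.mockito.ArgumentMatcher
import org.apache.spark.metrics.TestImplicits._

// The function literal is lifted to an ArgumentMatcher[Long] by `matcher`.
val positive: ArgumentMatcher[Long] = (v: Long) => v > 0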
Example 188
Source File: AsynchronousLogHandler.scala From scribe with MIT License | 5 votes |
package scribe.handler

import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.atomic.AtomicLong

import scribe.LogRecord
import scribe.format.Formatter
import scribe.modify.LogModifier
import scribe.writer.{ConsoleWriter, Writer}
import perfolation._

import scala.language.implicitConversions

case class AsynchronousLogHandler(formatter: Formatter = Formatter.default,
                                  writer: Writer = ConsoleWriter,
                                  modifiers: List[LogModifier] = Nil,
                                  maxBuffer: Int = AsynchronousLogHandler.DefaultMaxBuffer,
                                  overflow: Overflow = Overflow.DropOld) extends LogHandler {
  private lazy val cached = new AtomicLong(0L)

  private lazy val queue = {
    val q = new ConcurrentLinkedQueue[LogRecord[_]]
    val t = new Thread {
      setDaemon(true)

      override def run(): Unit = while (true) {
        Option(q.poll()) match {
          case Some(record) => {
            cached.decrementAndGet()
            SynchronousLogHandler.log(AsynchronousLogHandler.this, record)
            Thread.sleep(1L)
          }
          case None => Thread.sleep(10L)
        }
      }
    }
    t.start()
    q
  }

  def withMaxBuffer(maxBuffer: Int): AsynchronousLogHandler = copy(maxBuffer = maxBuffer)

  def withOverflow(overflow: Overflow): AsynchronousLogHandler = copy(overflow = overflow)

  override def withFormatter(formatter: Formatter): AsynchronousLogHandler = copy(formatter = formatter)

  override def withWriter(writer: Writer): AsynchronousLogHandler = copy(writer = writer)

  override def setModifiers(modifiers: List[LogModifier]): AsynchronousLogHandler = copy(modifiers = modifiers)

  override def log[M](record: LogRecord[M]): Unit = {
    val add = if (!cached.incrementIfLessThan(maxBuffer)) {
      overflow match {
        case Overflow.DropOld => {
          queue.poll()
          true
        }
        case Overflow.DropNew => false
        case Overflow.Block => {
          while (!cached.incrementIfLessThan(maxBuffer)) {
            Thread.sleep(1L)
          }
          true
        }
        case Overflow.Error =>
          throw new LogOverflowException(p"Queue filled (max: $maxBuffer) while attempting to asynchronously log")
      }
    } else {
      true
    }
    if (add) {
      queue.add(record)
    }
  }
}

object AsynchronousLogHandler {
  val DefaultMaxBuffer: Int = 1000
}
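A minimal configuration sketch (not from this file, and it assumes scribe's root-logger configuration API):

import scribe.Logger
import scribe.handler.{AsynchronousLogHandler, Overflow}

// Block the producer instead of dropping records when the 5000-record buffer fills.
val handler = AsynchronousLogHandler()
  .withMaxBuffer(5000)
  .withOverflow(Overflow.Block)
Logger.root.clearHandlers().withHandler(handler).replace()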
Example 189
Source File: package.scala From scribe with MIT License | 5 votes |
package scribe

import scala.language.implicitConversions

package object output {
  implicit def text(s: String): LogOutput = new TextOutput(s)
  implicit def seq2LogOutput(entries: Seq[LogOutput]): LogOutput = new CompositeOutput(entries.toList)

  implicit class EnhancedColor(color: Color) {
    def apply(out: LogOutput*): LogOutput = fg(out: _*)
    def fg(out: LogOutput*): LogOutput = output.fg(color, out: _*)
    def bg(out: LogOutput*): LogOutput = output.bg(color, out: _*)
  }

  def out(entries: LogOutput*): LogOutput = if (entries.length == 1) {
    entries.head
  } else {
    new CompositeOutput(entries.toList)
  }

  def color(color: Color, output: LogOutput*): LogOutput = fg(color, output: _*)

  def fg(color: Color, output: LogOutput*): LogOutput = new ColoredOutput(color, out(output: _*))

  def bg(color: Color, output: LogOutput*): LogOutput = new BackgroundColoredOutput(color, out(output: _*))

  def black(output: LogOutput*): LogOutput = fg(Color.Black, output: _*)
  def blue(output: LogOutput*): LogOutput = fg(Color.Blue, output: _*)
  def cyan(output: LogOutput*): LogOutput = fg(Color.Cyan, output: _*)
  def green(output: LogOutput*): LogOutput = fg(Color.Green, output: _*)
  def magenta(output: LogOutput*): LogOutput = fg(Color.Magenta, output: _*)
  def red(output: LogOutput*): LogOutput = fg(Color.Red, output: _*)
  def white(output: LogOutput*): LogOutput = fg(Color.White, output: _*)
  def yellow(output: LogOutput*): LogOutput = fg(Color.Yellow, output: _*)
  def gray(output: LogOutput*): LogOutput = fg(Color.Gray, output: _*)
  def brightBlue(output: LogOutput*): LogOutput = fg(Color.BrightBlue, output: _*)
  def brightCyan(output: LogOutput*): LogOutput = fg(Color.BrightCyan, output: _*)
  def brightGreen(output: LogOutput*): LogOutput = fg(Color.BrightGreen, output: _*)
  def brightMagenta(output: LogOutput*): LogOutput = fg(Color.BrightMagenta, output: _*)
  def brightRed(output: LogOutput*): LogOutput = fg(Color.BrightRed, output: _*)
  def brightWhite(output: LogOutput*): LogOutput = fg(Color.BrightWhite, output: _*)
  def brightYellow(output: LogOutput*): LogOutput = fg(Color.BrightYellow, output: _*)

  def bgBlack(output: LogOutput*): LogOutput = bg(Color.Black, output: _*)
  def bgBlue(output: LogOutput*): LogOutput = bg(Color.Blue, output: _*)
  def bgCyan(output: LogOutput*): LogOutput = bg(Color.Cyan, output: _*)
  def bgGreen(output: LogOutput*): LogOutput = bg(Color.Green, output: _*)
  def bgMagenta(output: LogOutput*): LogOutput = bg(Color.Magenta, output: _*)
  def bgRed(output: LogOutput*): LogOutput = bg(Color.Red, output: _*)
  def bgWhite(output: LogOutput*): LogOutput = bg(Color.White, output: _*)
  def bgYellow(output: LogOutput*): LogOutput = bg(Color.Yellow, output: _*)
  def bgGray(output: LogOutput*): LogOutput = bg(Color.Gray, output: _*)
  def bgBrightBlue(output: LogOutput*): LogOutput = bg(Color.BrightBlue, output: _*)
  def bgBrightCyan(output: LogOutput*): LogOutput = bg(Color.BrightCyan, output: _*)
  def bgBrightGreen(output: LogOutput*): LogOutput = bg(Color.BrightGreen, output: _*)
  def bgBrightMagenta(output: LogOutput*): LogOutput = bg(Color.BrightMagenta, output: _*)
  def bgBrightRed(output: LogOutput*): LogOutput = bg(Color.BrightRed, output: _*)
  def bgBrightWhite(output: LogOutput*): LogOutput = bg(Color.BrightWhite, output: _*)
  def bgBrightYellow(output: LogOutput*): LogOutput = bg(Color.BrightYellow, output: _*)

  def url(url: String, output: LogOutput): LogOutput = new URLOutput(url, output)
  def bold(output: LogOutput*): LogOutput = new BoldOutput(out(output: _*))
  def italic(output: LogOutput*): LogOutput = new ItalicOutput(out(output: _*))
  def underline(output: LogOutput*): LogOutput = new UnderlineOutput(out(output: _*))
  def strikethrough(output: LogOutput*): LogOutput = new StrikethroughOutput(out(output: _*))
}
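A minimal sketch of composing styled output; the plain strings are lifted to LogOutput by the implicit text conversion above:

import scribe.output._

val line: LogOutput = out(green("OK "), bold(underline("all checks passed")))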
Example 190
Source File: Implicits.scala From openlaw-core with Apache License 2.0 | 5 votes |
package org.adridadou.openlaw.result

import cats.implicits._
import cats.data.NonEmptyList

import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration
import scala.language.implicitConversions
import scala.util.{Try, Failure => TFailure, Success => TSuccess}

object Implicits {

  implicit class RichNonEmptyList[T](val nel: NonEmptyList[T]) extends AnyVal {
    def mkString: String = mkString(", ")
    def mkString(sep: String): String = nel.toList.mkString(sep)
  }

  implicit class RichTry[T](val t: Try[T]) extends AnyVal {
    def toResult: Result[T] = t match {
      case TSuccess(v) => Success(v)
      case TFailure(e: Exception) => Failure(e)
      // don't try to handle Error instances
      case TFailure(t) => throw t
    }
  }

  implicit class RichEither[T](val either: Either[String, T]) extends AnyVal {
    def toResult: Result[T] = either.left.map(FailureMessage(_))
  }

  implicit class RichFuture[T](val future: Future[T]) extends AnyVal {
    def getResult(timeout: Duration): Result[T] =
      attempt(Await.result(future, timeout))
  }

  implicit class RichResult[T](val result: Result[T]) extends AnyVal {
    def addCause(cause: Failure[T]): ResultNel[T] = result match {
      case Success(_) => cause.toResultNel
      case Left(original) => FailureNel(original, cause.value)
    }

    def addFailure[U >: T](cause: FailureCause): ResultNel[U] = result match {
      case s @ Success(_) => s.toResultNel
      case Left(original) => FailureNel(cause, original)
    }

    def addMessageToFailure[U >: T](message: String): ResultNel[U] = result match {
      case s @ Success(_) => s.toResultNel
      case Left(original) => FailureNel(FailureMessage(message), original)
    }

    def convert(pf: PartialFunction[Exception, Exception]): Result[T] =
      result.left.map {
        case FailureException(e, _) if pf.isDefinedAt(e) => FailureException(pf(e))
        case f => f
      }

    def recoverMerge(f: FailureCause => T): T =
      result.fold(failure => f(failure), success => success)

    def recoverWith(pf: PartialFunction[FailureCause, Result[T]]): Result[T] =
      result.leftFlatMap { error =>
        if (pf.isDefinedAt(error)) {
          pf(error)
        } else {
          result
        }
      }

    def toResultNel: ResultNel[T] = result.toValidatedNel

    def toFuture: Future[T] = result match {
      case Success(value) => Future.successful(value)
      case Failure(e, _) => Future.failed(e)
    }

    def getOrThrow(): T = result.valueOr(_.throwException())
  }

  implicit class RichOption[T](val option: Option[T]) extends AnyVal {
    def toResult(message: String): Result[T] =
      option.map(x => Success(x)).getOrElse(Failure(message))
  }

  implicit class RichResultNel[T](val result: ResultNel[T]) extends AnyVal {
    def toUnit: ResultNel[Unit] = result.map(_ => ())

    def toResult: Result[T] = result.toEither.leftMap {
      case NonEmptyList(x, Seq()) => x
      case nel => FailureException(MultipleCauseException(nel))
    }
  }

  implicit def exception2Result[A](e: Exception): Result[A] = Failure[A](e)

  implicit def unitResultConversion[T](wrapped: Result[T]): Result[Unit] = wrapped.map(_ => ())

  implicit def failureCause2Exception[T](wrapped: FailureCause): Exception = wrapped.e
}
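A minimal usage sketch of the Try and Option enrichments (hypothetical values):

import scala.util.Try
import org.adridadou.openlaw.result.Result
import org.adridadou.openlaw.result.Implicits._

val parsed: Result[Int] = Try("42".toInt).toResult                              // Success(42)
val missing: Result[String] = Option.empty[String].toResult("value is missing") // Failure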
Example 191
Source File: TestInputStream.scala From spark-testing-base with Apache License 2.0 | 5 votes |
package com.holdenkarau.spark.testing

import org.apache.spark.streaming._
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext._

import scala.language.implicitConversions
import scala.reflect.ClassTag

import org.apache.spark.streaming.dstream.FriendlyInputDStream

class TestInputStream[T: ClassTag](@transient var sc: SparkContext,
    ssc_ : StreamingContext, input: Seq[Seq[T]], numPartitions: Int)
  extends FriendlyInputDStream[T](ssc_) {

  def start() {}

  def stop() {}

  def compute(validTime: Time): Option[RDD[T]] = {
    logInfo("Computing RDD for time " + validTime)
    val index = ((validTime - ourZeroTime) / slideDuration - 1).toInt
    val selectedInput = if (index < input.size) input(index) else Seq[T]()

    // lets us test cases where RDDs are not created
    Option(selectedInput).map { si =>
      val rdd = sc.makeRDD(si, numPartitions)
      logInfo("Created RDD " + rdd.id + " with " + selectedInput)
      rdd
    }
  }
}
Example 192
Source File: DslCondition.scala From rule-engine with MIT License | 5 votes |
package nl.rabobank.oss.rules.dsl.nl.grammar

import nl.rabobank.oss.rules.dsl.nl.grammar.DslCondition._
import nl.rabobank.oss.rules.engine._
import nl.rabobank.oss.rules.facts.Fact
import nl.rabobank.oss.rules.utils.{SourcePosition, SourceUnknown}

import scala.language.implicitConversions

case class DslCondition(facts: Set[Fact[Any]], condition: Condition, sourcePosition: SourcePosition = SourceUnknown()) {
  def en[T](rhs: Fact[T]): DslConditionPart[T] = DslConditionPart(this, rhs, andPredicate)
  def en(rhs: DslCondition): DslCondition = combine(this, rhs, andPredicate)
  def of[T](rhs: Fact[T]): DslConditionPart[T] = DslConditionPart(this, rhs, orPredicate)
  def of(rhs: DslCondition): DslCondition = combine(this, rhs, orPredicate)

  private def combine(lhs: DslCondition, rhs: DslCondition, predicate: ConditionFunction): DslCondition =
    DslCondition(lhs.facts ++ rhs.facts, predicate(lhs.condition, rhs.condition))
}

object DslCondition {
  val andPredicate: ConditionFunction = (l, r) => c => l(c) && r(c)
  val orPredicate: ConditionFunction = (l, r) => c => l(c) || r(c)

  val emptyTrueCondition: DslCondition = DslCondition(Set(), _ => true)

  def factFilledCondition[A](fact: Fact[A]): DslCondition = DslCondition(Set(fact), Conditions.exists(fact))

  def andCombineConditions(initialDslCondition: DslCondition, dslConditions: DslCondition*): DslCondition =
    dslConditions.foldLeft(initialDslCondition)(_ en _)

  def orCombineConditions(initialDslCondition: DslCondition, dslConditions: DslCondition*): DslCondition =
    dslConditions.foldLeft(initialDslCondition)(_ of _)
}

trait DslConditionImplicits {
  implicit def toConditionDslPart[T](factDef: Fact[T]): DslConditionPart[T] =
    DslConditionPart(emptyTrueCondition, factDef, andPredicate)

  implicit def dslEvaluationToConditionDslPart[T](dslEvaluation: DslEvaluation[T]): DslEvaluationConditionPart[T] =
    DslEvaluationConditionPart(emptyTrueCondition, dslEvaluation, andPredicate)

  val altijd: DslCondition = emptyTrueCondition
}
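A minimal sketch of combining conditions with the Dutch connectives en (and) and of (or); LeeftijdFact and InkomenFact are hypothetical facts from some glossary:

import nl.rabobank.oss.rules.dsl.nl.grammar.DslCondition._

val leeftijdBekend: DslCondition = factFilledCondition(LeeftijdFact)
val inkomenBekend: DslCondition = factFilledCondition(InkomenFact)
val beide: DslCondition = leeftijdBekend en inkomenBekend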
Example 193
Source File: DslEvaluation.scala From rule-engine with MIT License | 5 votes |
package nl.rabobank.oss.rules.dsl.nl.grammar

import nl.rabobank.oss.rules.dsl.core.operators._
import nl.rabobank.oss.rules.dsl.core.temporal.LocalDate
import nl.rabobank.oss.rules.dsl.nl.grammar.DslCondition._
import nl.rabobank.oss.rules.engine._
import nl.rabobank.oss.rules.facts.{ListFact, SingularFact}
import nl.rabobank.oss.rules.finance.core.Quantity
import nl.rabobank.oss.rules.finance.nl.{Bedrag, Percentage}

import scala.language.implicitConversions

//scalastyle:off method.name

class ListUnwrappingEvaluation[A](wrapped: Evaluation[List[A]]) extends Evaluation[A] {
  override def apply(c: Context): Option[A] = wrapped(c) match {
    case Some(x :: xs) => Some(x)
    case _ => None
  }
}

class BinaryEvaluation[-A, B, +C](lhs: Evaluation[A], rhs: Evaluation[B],
                                  operatorDefinition: BinaryOperable[A, B, C]) extends Evaluation[C] {
  override def apply(c: Context): Option[C] = {
    val lhsValues = lhs(c).getOrElse(operatorDefinition.identityLeft)
    val rhsValues = rhs(c).getOrElse(operatorDefinition.identityRight)
    Some(operatorDefinition.operation(lhsValues, rhsValues))
  }

  override def toString: String = s"${lhs.toString} ${operatorDefinition.representation} ${rhs.toString}"
}

class UnaryMinusEvaluation[+A : Quantity](eval: Evaluation[A]) extends Evaluation[A] {
  override def apply(c: Context): Option[A] = {
    val ev = implicitly[Quantity[A]]
    Some(ev.negate(eval(c).getOrElse(ev.zero)))
  }
}

class DslEvaluation[+A](val condition: DslCondition, val evaluation: Evaluation[A]) {

  def +[A1 >: A, B, C](other: DslEvaluation[B])(implicit ev: Addable[A1, B, C]): DslEvaluation[C] = {
    newDslEvaluation(other, new BinaryEvaluation[A1, B, C](evaluation, other.evaluation, ev))
  }

  def -[A1 >: A, B, C](other: DslEvaluation[B])(implicit ev: Subtractable[A1, B, C]): DslEvaluation[C] = {
    newDslEvaluation(other, new BinaryEvaluation[A1, B, C](evaluation, other.evaluation, ev))
  }

  def *[A1 >: A, B, C](other: DslEvaluation[B])(implicit ev: Multipliable[A1, B, C]): DslEvaluation[C] = {
    // Values can only be multiplied with BigDecimal. But this must be commutative. In the finance DSL we solve this
    // with overloads, but here, we're working with generic types based on the value types. Overloading doesn't work
    // here, due to type erasure (Numeric[BigDecimal] erases to the same type as Numeric[Bedrag]). Therefore we need
    // a new type class to work around this issue.
    newDslEvaluation(other, new BinaryEvaluation[A1, B, C](evaluation, other.evaluation, ev))
  }

  def /[A1 >: A, B, C](other: DslEvaluation[B])(implicit ev: Divisible[A1, B, C]): DslEvaluation[C] = {
    newDslEvaluation(other, new BinaryEvaluation[A1, B, C](evaluation, other.evaluation, ev))
  }

  def unary_-[B >: A : Quantity]: DslEvaluation[B] = {
    DslEvaluation(condition, new UnaryMinusEvaluation[B](evaluation))
  }

  private def newDslEvaluation[B](other: DslEvaluation[Any], newEvaluation: Evaluation[B]) =
    DslEvaluation(andCombineConditions(condition, other.condition), newEvaluation)

  private def newDslEvaluation[B](other: SingularFact[B], newEvaluation: Evaluation[B]) =
    DslEvaluation(andCombineConditions(condition, factFilledCondition(other)), newEvaluation)
}

object DslEvaluation {
  def apply[A](condition: DslCondition, evaluation: Evaluation[A]): DslEvaluation[A] =
    new DslEvaluation[A](condition, evaluation)
}

trait DslEvaluationImplicits {

  implicit def factToDslEvaluation[A](fact: SingularFact[A]): DslEvaluation[A] =
    DslEvaluation(factFilledCondition(fact), new SingularFactEvaluation[A](fact))

  implicit def listFactToDslEvaluation[A](fact: ListFact[A]): DslEvaluation[List[A]] =
    DslEvaluation(factFilledCondition(fact), new ListFactEvaluation[A](fact))

  implicit def intToDslEvaluation(value: Int): DslEvaluation[Int] =
    DslEvaluation(emptyTrueCondition, new ConstantValueEvaluation[Int](value))

  implicit def intToBigDecimalDslEvaluation(value: Int): DslEvaluation[BigDecimal] =
    DslEvaluation(emptyTrueCondition, new ConstantValueEvaluation[BigDecimal](value))

  implicit def bigDecimalToDslEvaluation(value: BigDecimal): DslEvaluation[BigDecimal] =
    DslEvaluation(emptyTrueCondition, new ConstantValueEvaluation[BigDecimal](value))

  implicit def bedragToDslEvaluation(value: Bedrag): DslEvaluation[Bedrag] =
    DslEvaluation(emptyTrueCondition, new ConstantValueEvaluation[Bedrag](value))

  implicit def stringToDslEvaluation(value: String): DslEvaluation[String] =
    DslEvaluation(emptyTrueCondition, new ConstantValueEvaluation[String](value))

  implicit def percentageToDslEvaluation(value: Percentage): DslEvaluation[Percentage] =
    DslEvaluation(emptyTrueCondition, new ConstantValueEvaluation[Percentage](value))

  implicit def dslDatumToDslEvaluation(value: LocalDate): DslEvaluation[LocalDate] =
    DslEvaluation(emptyTrueCondition, new ConstantValueEvaluation[Datum](value))
}
Example 194
Source File: LijstFilterBerekeningen.scala From rule-engine with MIT License | 5 votes |
package nl.rabobank.oss.rules.dsl.nl.grammar

import nl.rabobank.oss.rules.dsl.core.projections.{ProjectableListFields, ProjectedDslEvaluation}
import nl.rabobank.oss.rules.dsl.nl.grammar.LijstBerekeningGlossary._
import nl.rabobank.oss.rules.dsl.nl.grammar.ComplexObjectProjections._
import nl.rabobank.oss.rules.engine._
import nl.rabobank.oss.rules.facts.ListFact

import scala.language.implicitConversions

class LijstFilter extends Berekening(
  Gegeven (altijd)
    Bereken
      LijstGefilterd is (filter lijst LijstOngefilterd op (1, 2, 3, 4)) en
      LijstGefilterdMetList is (filter lijst LijstOngefilterd op List(1, 2, 3, 4)) en
      LijstGefilterdComplexObject is (filter lijst LijstOngefilterdComplexObject op LijstOngefilterdComplexObject.value van (3, 4, 6))
)

case class ComplexFilterObject(value: Int)

class ComplexObjectProjections(complexObjectFact: ListFact[ComplexFilterObject]) extends ProjectableListFields[ComplexFilterObject] {
  def outerFact: ListFact[ComplexFilterObject] = complexObjectFact

  val value: ProjectedDslEvaluation[ComplexFilterObject, Int] = projectField(_.value)
}

object ComplexObjectProjections {
  implicit def toProjection(f: ListFact[ComplexFilterObject]): ComplexObjectProjections = new ComplexObjectProjections(f)
}
Example 195
Source File: ProjectableFieldsCalculation.scala From rule-engine with MIT License | 5 votes |
package nl.rabobank.oss.rules.dsl.core.projections

import scala.language.implicitConversions

import nl.rabobank.oss.rules.dsl.core.projections.ProjectableFieldsGlossary._
import nl.rabobank.oss.rules.dsl.core.projections.ComplexObjectProjections.toProjections
import nl.rabobank.oss.rules.dsl.nl.grammar._
import nl.rabobank.oss.rules.facts.{Fact, ListFact, SingularFact}

class ProjectableFieldsCalculation extends Berekening(
  Gegeven(altijd)
    Bereken
      IntFact is ComplexFact.intValue + IntFact2 en
      StringFactList is ComplexFactList.stringValue
)

case class ComplexObject(intValue: Int, stringValue: String)

object ComplexObjectProjections {
  implicit def toProjections(fact: SingularFact[ComplexObject]): ComplexObjectProjections =
    new ComplexObjectProjections(fact)

  implicit def toProjections(fact: ListFact[ComplexObject]): ComplexObjectListProjections =
    new ComplexObjectListProjections(fact)
}

class ComplexObjectProjections(complexFact: Fact[ComplexObject]) extends ProjectableFields[ComplexObject] {
  override protected def outerFact: Fact[ComplexObject] = complexFact

  val intValue: DslEvaluation[Int] = projectField(_.intValue)
  val stringValue: DslEvaluation[String] = projectField(_.stringValue)
}

class ComplexObjectListProjections(complexFact: ListFact[ComplexObject]) extends ProjectableListFields[ComplexObject] {
  override protected def outerFact: ListFact[ComplexObject] = complexFact

  val intValue: DslEvaluation[List[Int]] = projectField(_.intValue)
  val stringValue: DslEvaluation[List[String]] = projectField(_.stringValue)
}
Example 196
Source File: VertexPartition.scala From sparkoscope with Apache License 2.0 | 5 votes |
package org.apache.spark.graphx.impl

import scala.reflect.ClassTag

import org.apache.spark.graphx._
import org.apache.spark.util.collection.BitSet

private[graphx] object VertexPartition {
  import scala.language.implicitConversions

  // NOTE: the object body was elided in extraction; the implicit conversion below
  // is reconstructed so VertexPartitionBaseOps operations can be invoked directly
  // on a VertexPartition, which is what this example demonstrates.
  implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD]): VertexPartitionOps[VD] =
    new VertexPartitionOps(partition)
}

private[graphx] class VertexPartition[VD: ClassTag](
    val index: VertexIdToIndexMap,
    val values: Array[VD],
    val mask: BitSet)
  extends VertexPartitionBase[VD]

private[graphx] class VertexPartitionOps[VD: ClassTag](self: VertexPartition[VD])
  extends VertexPartitionBaseOps[VD, VertexPartition](self) {

  def withIndex(index: VertexIdToIndexMap): VertexPartition[VD] = {
    new VertexPartition(index, self.values, self.mask)
  }

  def withValues[VD2: ClassTag](values: Array[VD2]): VertexPartition[VD2] = {
    new VertexPartition(self.index, values, self.mask)
  }

  def withMask(mask: BitSet): VertexPartition[VD] = {
    new VertexPartition(self.index, self.values, mask)
  }
}
Example 197
Source File: StatusTrackerSuite.scala From sparkoscope with Apache License 2.0 | 5 votes |
package org.apache.spark

import scala.concurrent.duration._
import scala.language.implicitConversions
import scala.language.postfixOps

import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._

import org.apache.spark.JobExecutionStatus._

class StatusTrackerSuite extends SparkFunSuite with Matchers with LocalSparkContext {

  test("basic status API usage") {
    sc = new SparkContext("local", "test", new SparkConf(false))
    val jobFuture = sc.parallelize(1 to 10000, 2).map(identity).groupBy(identity).collectAsync()
    val jobId: Int = eventually(timeout(10 seconds)) {
      val jobIds = jobFuture.jobIds
      jobIds.size should be(1)
      jobIds.head
    }
    val jobInfo = eventually(timeout(10 seconds)) {
      sc.statusTracker.getJobInfo(jobId).get
    }
    jobInfo.status() should not be FAILED
    val stageIds = jobInfo.stageIds()
    stageIds.size should be(2)

    val firstStageInfo = eventually(timeout(10 seconds)) {
      sc.statusTracker.getStageInfo(stageIds(0)).get
    }
    firstStageInfo.stageId() should be(stageIds(0))
    firstStageInfo.currentAttemptId() should be(0)
    firstStageInfo.numTasks() should be(2)

    eventually(timeout(10 seconds)) {
      val updatedFirstStageInfo = sc.statusTracker.getStageInfo(stageIds(0)).get
      updatedFirstStageInfo.numCompletedTasks() should be(2)
      updatedFirstStageInfo.numActiveTasks() should be(0)
      updatedFirstStageInfo.numFailedTasks() should be(0)
    }
  }

  test("getJobIdsForGroup()") {
    sc = new SparkContext("local", "test", new SparkConf(false))
    // Passing `null` should return jobs that were not run in a job group:
    val defaultJobGroupFuture = sc.parallelize(1 to 1000).countAsync()
    val defaultJobGroupJobId = eventually(timeout(10 seconds)) {
      defaultJobGroupFuture.jobIds.head
    }
    eventually(timeout(10 seconds)) {
      sc.statusTracker.getJobIdsForGroup(null).toSet should be (Set(defaultJobGroupJobId))
    }
    // Test jobs submitted in job groups:
    sc.setJobGroup("my-job-group", "description")
    sc.statusTracker.getJobIdsForGroup("my-job-group") should be (Seq.empty)
    val firstJobFuture = sc.parallelize(1 to 1000).countAsync()
    val firstJobId = eventually(timeout(10 seconds)) {
      firstJobFuture.jobIds.head
    }
    eventually(timeout(10 seconds)) {
      sc.statusTracker.getJobIdsForGroup("my-job-group") should be (Seq(firstJobId))
    }
    val secondJobFuture = sc.parallelize(1 to 1000).countAsync()
    val secondJobId = eventually(timeout(10 seconds)) {
      secondJobFuture.jobIds.head
    }
    eventually(timeout(10 seconds)) {
      sc.statusTracker.getJobIdsForGroup("my-job-group").toSet should be (
        Set(firstJobId, secondJobId))
    }
  }

  test("getJobIdsForGroup() with takeAsync()") {
    sc = new SparkContext("local", "test", new SparkConf(false))
    sc.setJobGroup("my-job-group2", "description")
    sc.statusTracker.getJobIdsForGroup("my-job-group2") shouldBe empty
    val firstJobFuture = sc.parallelize(1 to 1000, 1).takeAsync(1)
    val firstJobId = eventually(timeout(10 seconds)) {
      firstJobFuture.jobIds.head
    }
    eventually(timeout(10 seconds)) {
      sc.statusTracker.getJobIdsForGroup("my-job-group2") should be (Seq(firstJobId))
    }
  }

  test("getJobIdsForGroup() with takeAsync() across multiple partitions") {
    sc = new SparkContext("local", "test", new SparkConf(false))
    sc.setJobGroup("my-job-group2", "description")
    sc.statusTracker.getJobIdsForGroup("my-job-group2") shouldBe empty
    val firstJobFuture = sc.parallelize(1 to 1000, 2).takeAsync(999)
    val firstJobId = eventually(timeout(10 seconds)) {
      firstJobFuture.jobIds.head
    }
    eventually(timeout(10 seconds)) {
      sc.statusTracker.getJobIdsForGroup("my-job-group2") should have size 2
    }
  }
}
Example 198
Source File: ChatId.scala From telegram with Apache License 2.0 | 5 votes |
package com.bot4s.telegram.models

import scala.language.implicitConversions

sealed trait ChatId {
  def isChannel: Boolean
  def isChat: Boolean = !isChannel
  def toEither: Either[Long, String]
}

object ChatId {
  implicit def fromChat[T](id: Long): ChatId = ChatId(id)
  implicit def fromChannel[T](id: String): ChatId = ChatId(id)

  final case class Chat(id: Long) extends ChatId {
    override def isChannel: Boolean = false
    override def toEither: Either[Long, String] = Left(id)
  }

  final case class Channel(id: String) extends ChatId {
    override def isChannel: Boolean = true
    override def toEither: Either[Long, String] = Right(id)
  }

  def apply(chat: Long): ChatId = Chat(chat)
  def apply(channel: String): ChatId = Channel(channel)
}
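A minimal usage sketch (hypothetical ids): because the implicits live in the ChatId companion, raw ids can stand in for ChatId values anywhere one is expected, without extra imports:

import com.bot4s.telegram.models.ChatId

val byId: ChatId = 123456789L      // becomes ChatId.Chat(123456789L)
val byName: ChatId = "@my_channel" // becomes ChatId.Channel("@my_channel")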
Example 199
Source File: TestInputStream.scala From sscheck with Apache License 2.0 | 5 votes |
package es.ucm.fdi.sscheck.spark.streaming

import org.apache.spark.streaming._
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext._

import scala.language.implicitConversions
import scala.reflect.ClassTag

import org.apache.spark.streaming.dstream.SscheckFriendlyInputDStream
// import org.apache.spark.streaming.dstream.FriendlyInputDStream

class TestInputStream[T: ClassTag](@transient var sc: SparkContext,
    ssc_ : StreamingContext, input: Seq[Seq[T]], numPartitions: Int)
  extends SscheckFriendlyInputDStream[T](ssc_) {

  def start() {}

  def stop() {}

  def compute(validTime: Time): Option[RDD[T]] = {
    logInfo("Computing RDD for time " + validTime)
    val index = ((validTime - ourZeroTime) / slideDuration - 1).toInt
    val selectedInput = if (index < input.size) input(index) else Seq[T]()

    // lets us test cases where RDDs are not created
    if (selectedInput == null) {
      return None
    }
    val rdd = sc.makeRDD(selectedInput, numPartitions)
    logInfo("Created RDD " + rdd.id + " with " + selectedInput)
    Some(rdd)
  }
}
Example 200
Source File: PDStream.scala From sscheck with Apache License 2.0 | 5 votes |
package es.ucm.fdi.sscheck.gen

import org.scalatest.matchers.{Matcher, MatchResult}

import scala.language.implicitConversions

object PDStream {
  def empty[A]: PDStream[A] = new PDStream(List(): _*)

  implicit def batchSeq2dstream[A](batches: Seq[Batch[A]]): PDStream[A] = PDStream(batches: _*)
  implicit def seqSeq2dstream[A](batches: Seq[Seq[A]]): PDStream[A] = PDStream(batches.map(Batch(_: _*)): _*)
}

// NOTE: the enclosing class declaration was elided in extraction; `subsetOf`
// operates on the batches of a PDStream, so the minimal wrapper is reconstructed.
case class PDStream[A](batches: Batch[A]*) {
  def subsetOf(other: PDStream[A]): Boolean = {
    batches
      .zip(other.batches)
      .map({ case (thisBatch, otherBatch) =>
        thisBatch.forall(otherBatch.contains(_))
      })
      .forall(identity[Boolean])
  }
}

trait DStreamMatchers {
  class DStreamSubsetOf[A](expectedSuperDStream: PDStream[A]) extends Matcher[PDStream[A]] {
    override def apply(observedDStream: PDStream[A]): MatchResult = {
      // FIXME reimplement with Inspector for better report
      MatchResult(observedDStream.subsetOf(expectedSuperDStream),
        s"""$observedDStream is not a pointwise subset of $expectedSuperDStream""",
        s"""$observedDStream is a pointwise subset of $expectedSuperDStream""")
    }
  }

  def beSubsetOf[A](expectedSuperDStream: PDStream[A]) = new DStreamSubsetOf(expectedSuperDStream)
}

object DStreamMatchers extends DStreamMatchers
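A minimal usage sketch of the seqSeq2dstream conversion together with subsetOf (hypothetical batches):

import es.ucm.fdi.sscheck.gen.PDStream
import es.ucm.fdi.sscheck.gen.PDStream._

// Plain Seq[Seq[Int]] values are lifted to PDStream[Int] implicitly.
val observed: PDStream[Int] = Seq(Seq(1), Seq(2, 3))
val expected: PDStream[Int] = Seq(Seq(1, 5), Seq(2, 3, 4))
assert(observed.subsetOf(expected))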