java.lang.Boolean Scala Examples

The following examples show how to use java.lang.Boolean in Scala. Each example comes from an open-source project; the source file, project name, and license are listed above each snippet.
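Before the examples, a minimal sketch (not taken from any of the projects below) of what java.lang.Boolean is in Scala code: the boxed, nullable wrapper used by Java APIs, as opposed to the primitive scala.Boolean.

object JavaBooleanBasics extends App {
  import java.lang.{Boolean => JBoolean}

  val boxed: JBoolean = JBoolean.valueOf(true) // boxed wrapper object
  val primitive: Boolean = boxed               // auto-unboxed via Predef.Boolean2boolean
  val absent: JBoolean = null                  // legal for the wrapper; scala.Boolean cannot be null

  println(primitive && boxed.booleanValue())   // true
  println(absent == null)                      // true
}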
Example 1
Source File: JacksonScalaProvider.scala    From daf-semantics   with Apache License 2.0
package it.almawave.kb.http.providers

import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider
import com.fasterxml.jackson.databind.ObjectMapper
import javax.ws.rs.ext.Provider
import javax.ws.rs.Produces
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import javax.ws.rs.core.MediaType
import com.fasterxml.jackson.annotation.JsonInclude

import com.fasterxml.jackson.annotation.JsonAnyGetter

import com.fasterxml.jackson.databind.SerializationFeature
import com.fasterxml.jackson.databind.DeserializationFeature
import javax.ws.rs.ext.ContextResolver
import com.fasterxml.jackson.databind.JsonSerializer
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind.SerializerProvider
import java.lang.Double
import java.lang.Boolean

@Provider
@Produces(Array(MediaType.APPLICATION_JSON))
class JacksonScalaProvider extends JacksonJaxbJsonProvider with ContextResolver[ObjectMapper] {

  println("\n\nregistered " + this.getClass)

  val mapper = new ObjectMapper()

  mapper
    .registerModule(DefaultScalaModule)
    .setSerializationInclusion(JsonInclude.Include.ALWAYS)

    .configure(SerializationFeature.INDENT_OUTPUT, true)
    .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, true)

    .configure(SerializationFeature.WRITE_NULL_MAP_VALUES, true)
    .configure(SerializationFeature.WRITE_SINGLE_ELEM_ARRAYS_UNWRAPPED, true)
    .configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, true)

    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    .configure(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true)
    .configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, true)
    .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
    //    .setVisibility(JsonMethod.FIELD, Visibility.ANY);

    .getSerializerProvider.setNullValueSerializer(new JsonSerializer[Object] {
      // Custom serializer invoked whenever a null value is written.
      override def serialize(obj: Object, gen: JsonGenerator, provider: SerializerProvider): Unit = {
        obj match {
          case bool: Boolean   => gen.writeBoolean(false)
          case number: Integer => gen.writeNumber(0)
          case number: Double  => gen.writeNumber(0.0D)
          case text: String    => gen.writeString("")
          case _               => gen.writeString("")
        }
      }
    })

  super.setMapper(mapper)

  override def getContext(klasses: Class[_]): ObjectMapper = mapper

} 
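A hedged sketch of how such a provider could be wired into a JAX-RS application, assuming a Jersey setup (the actual bootstrap code in daf-semantics may differ):

object HttpAppWiring {
  import org.glassfish.jersey.server.ResourceConfig
  import it.almawave.kb.http.providers.JacksonScalaProvider

  // Registering the provider makes Jersey use its Scala-aware ObjectMapper
  // for JSON request and response bodies.
  val config: ResourceConfig = new ResourceConfig()
    .register(classOf[JacksonScalaProvider])
}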
Example 2
Source File: LocalLogisticRegressionModel.scala    From spark-ml-serving   with Apache License 2.0
package io.hydrosphere.spark_ml_serving.classification

import java.lang.Boolean

import io.hydrosphere.spark_ml_serving.TypedTransformerConverter
import io.hydrosphere.spark_ml_serving.common._
import io.hydrosphere.spark_ml_serving.common.classification.LocalProbabilisticClassificationModel
import io.hydrosphere.spark_ml_serving.common.utils.DataUtils
import org.apache.spark.ml.classification.LogisticRegressionModel
import org.apache.spark.ml.linalg.{Matrix, SparseMatrix, Vector, Vectors}

class LocalLogisticRegressionModel(override val sparkTransformer: LogisticRegressionModel)
  extends LocalProbabilisticClassificationModel[LogisticRegressionModel] {}

object LocalLogisticRegressionModel
  extends SimpleModelLoader[LogisticRegressionModel]
  with TypedTransformerConverter[LogisticRegressionModel] {

  override def build(metadata: Metadata, data: LocalData): LogisticRegressionModel = {
    val constructor = classOf[LogisticRegressionModel].getDeclaredConstructor(
      classOf[String],
      classOf[Matrix],
      classOf[Vector],
      classOf[Int],
      java.lang.Boolean.TYPE
    )
    constructor.setAccessible(true)
    val coefficientMatrixParams =
      data.column("coefficientMatrix").get.data.head.asInstanceOf[Map[String, Any]]
    val coefficientMatrix = DataUtils.constructMatrix(coefficientMatrixParams)
    val interceptVectorParams =
      data.column("interceptVector").get.data.head.asInstanceOf[Map[String, Any]]
    val interceptVector = DataUtils.constructVector(interceptVectorParams)
    constructor
      .newInstance(
        metadata.uid,
        coefficientMatrix,
        interceptVector,
        data.column("numFeatures").get.data.head.asInstanceOf[java.lang.Integer],
        data.column("isMultinomial").get.data.head.asInstanceOf[java.lang.Boolean]
      )
      .setFeaturesCol(metadata.paramMap("featuresCol").asInstanceOf[String])
      .setPredictionCol(metadata.paramMap("predictionCol").asInstanceOf[String])
      .setProbabilityCol(metadata.paramMap("probabilityCol").asInstanceOf[String])
      .setRawPredictionCol(metadata.paramMap("rawPredictionCol").asInstanceOf[String])
      .setThreshold(metadata.paramMap("threshold").asInstanceOf[Double])
  }

  override implicit def toLocal(
    transformer: LogisticRegressionModel
  ): LocalTransformer[LogisticRegressionModel] = new LocalLogisticRegressionModel(transformer)
} 
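The constructor lookup above passes java.lang.Boolean.TYPE rather than classOf[java.lang.Boolean] because the constructor declares a primitive boolean parameter and getDeclaredConstructor matches parameter types exactly. A minimal sketch of the distinction:

object BooleanTypeVsClass extends App {
  // Boolean.TYPE is the Class object for the primitive boolean;
  // classOf[java.lang.Boolean] is the Class object for the boxed wrapper.
  println(java.lang.Boolean.TYPE)     // prints: boolean
  println(classOf[java.lang.Boolean]) // prints: class java.lang.Boolean
}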
Example 3
Source File: Node.scala    From HANAVora-Extensions   with Apache License 2.0
package org.apache.spark.sql.types

import java.lang.Boolean

@SQLUserDefinedType(udt = classOf[NodeType])
case class Node(path: Seq[Any],
                pathDataTypeJson: String,
                var preRank: java.lang.Integer = null,
                var postRank: java.lang.Integer = null,
                isLeaf: java.lang.Boolean = null,
                var ordPath: Seq[Long] = null) {

  lazy val effectivePath: Seq[Any] = path.reverse.dropWhile(_ == null).reverse

  def compareToRecursive(left: Seq[Long], right: Seq[Long]): Int =
  {
    (left.isEmpty, right.isEmpty) match {
      case (true, true) => 0    // both are equal
      case (true, false) => -1  // left is smaller than right
      case (false, true) => 1   // left is greater than right
      case (false, false) =>
        if (left.head == right.head) {
          compareToRecursive(left.tail, right.tail)
        } else {
          left.head.compareTo(right.head)
        }
    }
  }

  def compareTo(that: Node): Int = compareToRecursive(ordPath, that.ordPath)

  
  protected def this() = this(null, null)

  if (path != null && path.isEmpty) {
    throw new IllegalStateException("A Node cannot contain an empty path")
  }
}

case object Node {
  def apply(path: Seq[Any],
            pathDataType: DataType,
            preRank: Integer,
            postRank: Integer,
            isLeaf: Boolean,
            ordPath: Seq[Long]): Node =
    Node(path, if (pathDataType == null) null else pathDataType.json,
      preRank, postRank, isLeaf, ordPath)

  def apply(path: Seq[Any],
            pathDataType: DataType,
            preRank: Integer,
            postRank: Integer,
            isLeaf: Boolean): Node =
    Node(path, if (pathDataType == null) null else pathDataType.json, preRank, postRank, isLeaf)

  def apply(path: Seq[Any],
            pathDataType: DataType,
            ordPath: Seq[Long]): Node =
    Node(path, if (pathDataType == null) null else pathDataType.json, ordPath = ordPath)

  def apply(path: Seq[Any],
            pathDataType: DataType): Node =
    Node(path, if (pathDataType == null) null else pathDataType.json)

} 
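Node declares isLeaf as java.lang.Boolean so the field can be left null when the leaf status is unknown; scala.Boolean is a value type and cannot hold null. A minimal sketch of that pattern (hypothetical Row class, not from HANAVora-Extensions):

object NullableFlag extends App {
  case class Row(isLeaf: java.lang.Boolean = null)

  val unknown = Row()              // null means "not yet computed"
  val leaf = Row(isLeaf = true)    // the scala.Boolean literal is auto-boxed

  println(Option(unknown.isLeaf))  // None
  println(Option(leaf.isLeaf))     // Some(true)
}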
Example 4
Source File: UnicomplexActorPublisher.scala    From squbs   with Apache License 2.0
package org.squbs.stream

import java.lang.Boolean

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.stream.actor.{ActorPublisher, ActorPublisherMessage}
import akka.stream.javadsl
import akka.stream.scaladsl.Source
import org.squbs.stream.TriggerEvent._
import org.squbs.unicomplex.{Active, Stopping, _}

final class UnicomplexActorPublisher extends ActorPublisher[LifecycleState] {

  override def receive = {
    case ActorPublisherMessage.Request(_) =>
      Unicomplex() ! SystemState
      Unicomplex() ! ObtainLifecycleEvents()
    case ActorPublisherMessage.Cancel | ActorPublisherMessage.SubscriptionTimeoutExceeded =>
      context.stop(self)
    case SystemState => Unicomplex() ! SystemState
    case element: LifecycleState if demand_? => onNext(element)
  }

  private def demand_? : Boolean = totalDemand > 0
}

case class LifecycleManaged[T, M]() {
  val trigger = Source.actorPublisher[LifecycleState](Props.create(classOf[UnicomplexActorPublisher]))
    .collect {
      case Active => ENABLE
      case Stopping => DISABLE
    }

  val source = (in: Source[T, M]) => new Trigger(eagerComplete = true).source(in, trigger)

  // for Java
  def source(in: javadsl.Source[T, M]): javadsl.Source[T, akka.japi.Pair[M, ActorRef]] = source(in.asScala)
    .mapMaterializedValue {
      case (m1, m2) => akka.japi.Pair(m1, m2)
    }.asJava
} 
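In this example the bare import of java.lang.Boolean shadows scala.Boolean, so the declared return type of demand_? is the boxed wrapper and the scala.Boolean result of totalDemand > 0 is auto-boxed. A minimal sketch of that shadowing (not part of the squbs code):

object ShadowedBoolean extends App {
  import java.lang.Boolean // from here on, the bare name Boolean means java.lang.Boolean

  def positive(n: Long): Boolean = n > 0 // the scala.Boolean comparison result is boxed

  println(positive(1).getClass) // class java.lang.Boolean
}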
Example 5
Source File: ConnectMongoConverterSpec.scala    From kafka-connect-mongodb   with Apache License 2.0
package com.startapp.data

import java.lang.Boolean
import java.util

import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.scalatest.{FlatSpec, Matchers}

class ConnectMongoConverterSpec extends FlatSpec with Matchers {
  private val FIELD1_NAME = "fieldInt"
  private val FIELD1_VALUE = new Integer(5)
  private val FIELD2_NAME = "fieldString"
  private val FIELD2_VALUE = "str"
  private val FIELD3_NAME = "fieldBoolean"
  private val FIELD3_VALUE = new Boolean(true)

  val schema = SchemaBuilder.struct().name("test schema")
    .field(FIELD1_NAME, Schema.INT32_SCHEMA)
    .field(FIELD2_NAME, Schema.STRING_SCHEMA)
    .field(FIELD3_NAME, Schema.BOOLEAN_SCHEMA)
    .build()

  "No Schema Connect Mongo Converter Bad Data" should "throw an exception" in {
    var exceptionThrown = false

    val badData = new Struct(schema)

    try{
      checkJsonMap(NoSchemaConnectMongoConverter, badData)
    }
    catch {
      case _ : java.lang.ClassCastException => exceptionThrown = true
    }

    exceptionThrown should be(true)
  }

  "No Schema Connect Mongo Converter Good Data" should "return the same map" in {
    val jsonMap = new util.HashMap[String, Object]()
    jsonMap.put(FIELD1_NAME, FIELD1_VALUE)
    jsonMap.put(FIELD2_NAME, FIELD2_VALUE)
    jsonMap.put(FIELD3_NAME, FIELD3_VALUE)

    checkJsonMap(NoSchemaConnectMongoConverter, jsonMap)
  }

  "Schema Connect Mongo Converter Bad Data" should "throw an exception" in {
    var exceptionThrown = false

    val badData = new util.HashMap[String, Object]()
    badData.put(FIELD1_NAME, FIELD1_VALUE)

    try {
      checkJsonMap(SchemaConnectMongoConverter, badData)
    }
    catch {
      case _ : java.lang.ClassCastException => exceptionThrown = true
    }

    exceptionThrown should be(true)
  }

  "Schema Connect Mongo Converter Good Data" should "convert data to json map" in {
    val data = new Struct(schema)
      .put(FIELD1_NAME, FIELD1_VALUE)
      .put(FIELD2_NAME, FIELD2_VALUE)
      .put(FIELD3_NAME, FIELD3_VALUE)

    checkJsonMap(SchemaConnectMongoConverter, data)
  }

  private def checkJsonMap(converter: ConnectMongoConverter, value: Object): Unit = {
    val newJsonMap = converter.toJsonMap(value).toMap

    newJsonMap(FIELD1_NAME) should be(FIELD1_VALUE)
    newJsonMap(FIELD2_NAME) should be(FIELD2_VALUE)
    newJsonMap(FIELD3_NAME) should be(FIELD3_VALUE)
  }

}
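A side note on the boxed values used in this spec: the Integer(int) and Boolean(boolean) constructors are deprecated since Java 9, and the valueOf factories are the preferred way to obtain boxed instances (the code above still compiles as written). A minimal sketch:

object BoxedValues extends App {
  // valueOf reuses cached instances instead of allocating a new wrapper each time.
  val flag: java.lang.Boolean = java.lang.Boolean.valueOf(true)
  val count: java.lang.Integer = java.lang.Integer.valueOf(5)

  println(flag)  // true
  println(count) // 5
}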