javax.annotation.Nullable Scala Examples

The following examples show how to use javax.annotation.Nullable in Scala, drawn from several open-source projects; the source file, project, and license are noted above each example.
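Before the project examples, here is a minimal sketch of the pattern they all follow: annotate a constructor parameter that may legitimately be null with @Nullable, then wrap reads in Option(...) so the rest of the code never touches null directly. The User class and its fields are hypothetical, chosen only for illustration.

import javax.annotation.Nullable

class User(val id: Long, @Nullable val nickname: String) {
  // Option(null) is None, so this is safe whether or not nickname was set.
  def displayName: String = Option(nickname).getOrElse(s"user-$id")
}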
Example 1
Source File: interface.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.sql.catalog

import javax.annotation.Nullable

import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.catalyst.DefinedByConstructorParams


// Note: all classes here are expected to be wrapped in Datasets and so must extend
// DefinedByConstructorParams for the catalog to be able to create encoders for them.


@InterfaceStability.Stable
class Function(
    val name: String,
    @Nullable val database: String,
    @Nullable val description: String,
    val className: String,
    val isTemporary: Boolean)
  extends DefinedByConstructorParams {

  override def toString: String = {
    "Function[" +
      s"name='$name', " +
      Option(database).map { d => s"database='$d', " }.getOrElse("") +
      Option(description).map { d => s"description='$d', " }.getOrElse("") +
      s"className='$className', " +
      s"isTemporary='$isTemporary']"
  }

} 
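Because database and description are annotated @Nullable, a caller may pass null for either one, and toString wraps both in Option(...) so that null fields are simply omitted from the rendering. A usage sketch against the class above (the argument values are made up for illustration):

val fn = new Function(
  name = "myUdf",
  database = null,      // allowed: annotated @Nullable
  description = null,   // allowed: annotated @Nullable
  className = "com.example.MyUdf",
  isTemporary = true)

// Prints: Function[name='myUdf', className='com.example.MyUdf', isTemporary='true']
println(fn)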
Example 2
Source File: Parameters.scala    From flint   with Apache License 2.0
package com.twosigma.flint.timeseries.io.read

import java.time.{ Instant, ZonedDateTime, ZoneOffset }
import javax.annotation.Nullable

import scala.collection.mutable

import com.twosigma.flint.annotation.PythonApi

private[read] class Parameters private (
  val extraOptions: mutable.Map[String, String],
  var range: BeginEndRange = BeginEndRange(None, None, None, None)
) extends Serializable {

  def this(defaultOptions: Map[String, String]) =
    this(mutable.HashMap[String, String](defaultOptions.toSeq: _*))

  def option(key: String, valueOpt: Option[String]): Unit = valueOpt match {
    case Some(v) => extraOptions += key -> v
    case None => extraOptions -= key
  }

  
  // Exposes the extra options as a java.util.Map for the Python bindings.
  @PythonApi
  private[read] def extraOptionsAsJavaMap: java.util.Map[String, String] = {
    import scala.collection.JavaConverters._
    extraOptions.asJava
  }

}

private[read] case class BeginEndRange(
  rawBeginNanosOpt: Option[Long] = None,
  rawEndNanosOpt: Option[Long] = None,
  expandBeginNanosOpt: Option[Long] = None,
  expandEndNanosOpt: Option[Long] = None
) {

  def beginNanos: Long = beginNanosOpt.getOrElse(
    throw new IllegalArgumentException("'begin' range must be set")
  )

  def endNanos: Long = endNanosOpt.getOrElse(
    throw new IllegalArgumentException("'end' range must be set")
  )

  def beginNanosOpt: Option[Long] = {
    rawBeginNanosOpt.map(_ - expandBeginNanosOpt.getOrElse(0L))
  }

  def endNanosOpt: Option[Long] = {
    rawEndNanosOpt.map(_ + expandEndNanosOpt.getOrElse(0L))
  }

  @PythonApi
  private[read] def beginNanosOrNull: java.lang.Long = beginNanosOpt.map(Long.box).orNull

  @PythonApi
  private[read] def endNanosOrNull: java.lang.Long = endNanosOpt.map(Long.box).orNull
} 
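BeginEndRange widens the raw range by the expand offsets: beginNanosOpt subtracts expandBeginNanosOpt from the raw begin, endNanosOpt adds expandEndNanosOpt to the raw end, and the *OrNull variants box the result as a nullable java.lang.Long for the Python API. A sketch of the arithmetic (the values are made up; note that BeginEndRange and the *OrNull helpers are private[read], so this only compiles inside the com.twosigma.flint.timeseries.io.read package):

val range = BeginEndRange(
  rawBeginNanosOpt = Some(1000L),
  rawEndNanosOpt = Some(5000L),
  expandBeginNanosOpt = Some(100L)) // no end expansion, defaults to None

range.beginNanos      // 900L  (1000 - 100)
range.endNanos        // 5000L (5000 + 0)
range.endNanosOrNull  // java.lang.Long 5000; would be null if rawEndNanosOpt were None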
Examples 3-6
Source File: interface.scala    From XSQL, sparkoscope, multi-tenancy-spark, and Spark-2.3.1    with Apache License 2.0
Each of these Spark forks carries the same org.apache.spark.sql.catalog.Function class, byte-for-byte identical to Example 1, so the code is shown only once above.