org.apache.spark.SPARK_VERSION Scala Examples

The following examples show how to use org.apache.spark.SPARK_VERSION. Each example is drawn from an open source project; the source file and originating project are listed above the code.
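As a primer, SPARK_VERSION itself is simply a String constant defined in the org.apache.spark package object, holding the version of the Spark build on the classpath (e.g. "2.4.0"). A minimal sketch of reading it (the object name VersionCheck is illustrative):

import org.apache.spark.SPARK_VERSION

object VersionCheck {
  def main(args: Array[String]): Unit = {
    // Prints the version string of the Spark artifacts on the classpath.
    println(s"Running on Spark $SPARK_VERSION")
  }
}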
Example 1
Source File: LauncherBackend.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.launcher

import java.net.{InetAddress, Socket}

import org.apache.spark.SPARK_VERSION
import org.apache.spark.launcher.LauncherProtocol._
import org.apache.spark.util.{ThreadUtils, Utils}


private[spark] abstract class LauncherBackend {

  // Tracks whether this backend currently holds a live connection to the launcher server.
  @volatile private var _isConnected = false

  // Callback invoked when the launcher server requests that the application stop.
  protected def onStopRequest(): Unit

  protected def onDisconnected(): Unit = { }

  private def fireStopRequest(): Unit = {
    val thread = LauncherBackend.threadFactory.newThread(new Runnable() {
      override def run(): Unit = Utils.tryLogNonFatalError {
        onStopRequest()
      }
    })
    thread.start()
  }

  private class BackendConnection(s: Socket) extends LauncherConnection(s) {

    override protected def handle(m: Message): Unit = m match {
      case _: Stop =>
        fireStopRequest()

      case _ =>
        throw new IllegalArgumentException(s"Unexpected message type: ${m.getClass().getName()}")
    }

    override def close(): Unit = {
      try {
        super.close()
      } finally {
        onDisconnected()
        _isConnected = false
      }
    }

  }

}

private object LauncherBackend {

  val threadFactory = ThreadUtils.namedThreadFactory("LauncherBackend")

} 
Example 2
Source File: SparkILoop.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.repl

import java.io.BufferedReader

import scala.Predef.{println => _, _}
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
import scala.tools.nsc.util.stringFromStream
import scala.util.Properties.{javaVersion, javaVmName, versionString}


// Helpers for driving the Spark REPL programmatically; they live on the companion
// object of the SparkILoop class (the class definition itself is not part of this excerpt).
object SparkILoop {

  def run(code: String, sets: Settings = new Settings): String = {
    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }

    stringFromStream { ostream =>
      Console.withOut(ostream) {
        val input = new BufferedReader(new StringReader(code))
        val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
        val repl = new SparkILoop(input, output)

        if (sets.classpath.isDefault) {
          sets.classpath.value = sys.props("java.class.path")
        }
        repl process sets
      }
    }
  }
  def run(lines: List[String]): String = run(lines.map(_ + "\n").mkString)
} 
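A quick sketch of driving this helper: run feeds the given code to a fresh interpreter and returns whatever the REPL printed to the console (assuming the Spark REPL module is on the classpath; the object name ReplDemo is illustrative).

import org.apache.spark.repl.SparkILoop

object ReplDemo {
  def main(args: Array[String]): Unit = {
    // The returned string is the REPL's console transcript for the snippet.
    val transcript = SparkILoop.run("val answer = 21 * 2")
    println(transcript)
  }
}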
Example 3
Source File: SparkUtil.scala    From carbondata   with Apache License 2.0
package org.apache.spark.util

import org.apache.spark.{SPARK_VERSION, TaskContext}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.SQLExecution.EXECUTION_ID_KEY


// Utilities for comparing the running Spark version against a target "major.minor" version.
object SparkUtil {

  def isSparkVersionXandAbove(xVersion: String, isEqualComparision: Boolean = false): Boolean = {
    val tmpArray = SPARK_VERSION.split("\\.")
    // convert to float
    val sparkVersion = if (tmpArray.length >= 2) {
      (tmpArray(0) + "." + tmpArray(1)).toFloat
    } else {
      (tmpArray(0) + ".0").toFloat
    }
    // compare the versions
    if (isEqualComparision) {
      sparkVersion == xVersion.toFloat
    } else {
      sparkVersion >= xVersion.toFloat
    }
  }

  def isSparkVersionEqualTo(xVersion: String): Boolean = {
    isSparkVersionXandAbove(xVersion, true)
  }

  def setNullExecutionId(sparkSession: SparkSession): Unit = {
    // "spark.sql.execution.id is already set" exception will be
    // thrown if not set to null in spark2.2 and below versions
    if (!SparkUtil.isSparkVersionXandAbove("2.3")) {
      sparkSession.sparkContext.setLocalProperty(EXECUTION_ID_KEY, null)
    }
  }

} 
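A short sketch of how these helpers might gate version-specific behavior (the branches are illustrative):

import org.apache.spark.util.SparkUtil

object VersionGateDemo {
  def main(args: Array[String]): Unit = {
    if (SparkUtil.isSparkVersionXandAbove("2.3")) {
      // take the Spark 2.3+ code path
      println("Spark 2.3 or above")
    } else if (SparkUtil.isSparkVersionEqualTo("2.2")) {
      // apply a workaround needed only on Spark 2.2
      println("Spark 2.2 exactly")
    }
  }
}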
Example 4
Source File: SparkUtilTest.scala    From carbondata   with Apache License 2.0
package org.apache.spark.util

import org.apache.spark.SPARK_VERSION
import org.scalatest.FunSuite

class SparkUtilTest extends FunSuite {

  test("Test Spark Version API with X and Above") {
    if (SPARK_VERSION.startsWith("2.1")) {
      assert(SparkUtil.isSparkVersionXandAbove("2.1"))
      assert(!SparkUtil.isSparkVersionXandAbove("2.2"))
      assert(!SparkUtil.isSparkVersionXandAbove("2.3"))
    } else if (SPARK_VERSION.startsWith("2.2")) {
      assert(SparkUtil.isSparkVersionXandAbove("2.1"))
      assert(SparkUtil.isSparkVersionXandAbove("2.2"))
      assert(!SparkUtil.isSparkVersionXandAbove("2.3"))
    } else {
      assert(SparkUtil.isSparkVersionXandAbove("2.1"))
      assert(SparkUtil.isSparkVersionXandAbove("2.2"))
      assert(SparkUtil.isSparkVersionXandAbove("2.3") ||
             SparkUtil.isSparkVersionXandAbove("2.4"))
    }
  }

  test("Test Spark Version API Equal to X") {
    if (SPARK_VERSION.startsWith("2.1")) {
      assert(SparkUtil.isSparkVersionEqualTo("2.1"))
      assert(!SparkUtil.isSparkVersionEqualTo("2.2"))
      assert(!SparkUtil.isSparkVersionEqualTo("2.3"))
    } else if (SPARK_VERSION.startsWith("2.2")) {
      assert(!SparkUtil.isSparkVersionEqualTo("2.1"))
      assert(SparkUtil.isSparkVersionEqualTo("2.2"))
      assert(!SparkUtil.isSparkVersionEqualTo("2.3"))
    } else {
      assert(!SparkUtil.isSparkVersionEqualTo("2.1"))
      assert(!SparkUtil.isSparkVersionEqualTo("2.2"))
      assert(SparkUtil.isSparkVersionEqualTo("2.3") ||
             SparkUtil.isSparkVersionXandAbove("2.4"))
    }
  }
} 
Example 5
Source File: compat.scala    From jgit-spark-connector   with Apache License 2.0
package tech.sourced.engine.compat

import org.apache.spark.SPARK_VERSION
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.execution.datasources.{
  LogicalRelation => SparkLogicalRelation
}
import org.apache.spark.sql.sources.BaseRelation

import scala.reflect.runtime.{universe => ru}

private[compat] object Compat {

  def apply[T](s22: T, s23: T): T = SPARK_VERSION match {
    case s if s.startsWith("2.2.") => s22
    case s if s.startsWith("2.3.") => s23
    case _ =>
      throw new RuntimeException(s"Unsupported SPARK_VERSION: $SPARK_VERSION")
  }

  lazy val ClassMirror = ru.runtimeMirror(Compat.getClass.getClassLoader)

}

private[engine] object LogicalRelation {

  def apply(rel: BaseRelation,
            out: Seq[AttributeReference],
            catalog: Option[CatalogTable]): SparkLogicalRelation =
    applyImpl(rel, out, catalog)

  private lazy val applyImpl =
    Compat(applySpark22(_, _, _), applySpark23(_, _, _))

  private lazy val typ = ru.typeOf[SparkLogicalRelation]
  private lazy val classSymbol =
    Compat.ClassMirror.reflectClass(typ.typeSymbol.asClass)
  private lazy val ctor =
    classSymbol.reflectConstructor(typ.decl(ru.termNames.CONSTRUCTOR).asMethod)

  def applySpark22(rel: BaseRelation,
                   out: Seq[AttributeReference],
                   catalog: Option[CatalogTable]): SparkLogicalRelation =
    ctor(rel, out, catalog).asInstanceOf[SparkLogicalRelation]

  def applySpark23(rel: BaseRelation,
                   out: Seq[AttributeReference],
                   catalog: Option[CatalogTable]): SparkLogicalRelation =
    ctor(rel, out, catalog, false).asInstanceOf[SparkLogicalRelation]

  def unapply(arg: SparkLogicalRelation)
    : Option[(BaseRelation, Seq[AttributeReference], Option[CatalogTable])] =
    unapplyImpl(arg)

  private lazy val unapplyImpl = Compat(unapplySpark22(_), unapplySpark23(_))

  def unapplySpark22(arg: SparkLogicalRelation)
    : Option[(BaseRelation, Seq[AttributeReference], Option[CatalogTable])] =
    Some((arg.relation, arg.output, arg.catalogTable))

  def unapplySpark23(arg: SparkLogicalRelation)
    : Option[(BaseRelation, Seq[AttributeReference], Option[CatalogTable])] = {
    val isStreaming = Compat.ClassMirror
      .reflect(arg)
      .reflectField(typ.decl(ru.TermName("isStreaming")).asTerm)
      .get
      .asInstanceOf[Boolean]
    if (isStreaming) {
      None
    } else {
      Some((arg.relation, arg.output, arg.catalogTable))
    }
  }

} 
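A minimal sketch of the pattern this enables. Since Compat is private[compat], the demo object below (CompatDemo, illustrative) must live in the same package; Compat.apply selects the first argument on Spark 2.2.x, the second on 2.3.x, and throws for any other version.

package tech.sourced.engine.compat

object CompatDemo {
  def main(args: Array[String]): Unit = {
    // Picks the value matching the running Spark version (2.2.x or 2.3.x).
    val label = Compat("chosen on Spark 2.2", "chosen on Spark 2.3")
    println(label)
  }
}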