sbt.Keys.version Scala Examples

The following examples show how to use sbt.Keys.version. Each is taken from an open-source project; the source file, project, and license are noted above the example.
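
Before the examples, a note on the key itself: version is sbt's built-in SettingKey[String] holding the project version; a build sets it once and code reads it with version.value. A minimal sketch in build.sbt (the printVersion task is illustrative, not part of sbt):

ThisBuild / version := "0.1.0-SNAPSHOT"

lazy val printVersion = taskKey[Unit]("Prints the project version")

// A setting is read inside a task definition via .value
printVersion := println(s"Building version ${version.value}")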
Example 1
Source File: MimaBuild.scala    From drizzle-spark   with Apache License 2.0
import sbt._
import sbt.Keys.version

import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.core.MissingClassProblem
import com.typesafe.tools.mima.core.MissingTypesProblem
import com.typesafe.tools.mima.core.ProblemFilters._
import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings


object MimaBuild {

  def excludeMember(fullName: String) = Seq(
      ProblemFilters.exclude[MissingMethodProblem](fullName),
      // Sometimes excluded methods have default arguments and
      // they are translated into public methods/fields($default$) in generated
      // bytecode. It is not possible to exhaustively list everything.
      // But this should be okay.
      ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$2"),
      ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$1"),
      ProblemFilters.exclude[MissingFieldProblem](fullName),
      ProblemFilters.exclude[IncompatibleResultTypeProblem](fullName),
      ProblemFilters.exclude[IncompatibleMethTypeProblem](fullName),
      ProblemFilters.exclude[IncompatibleFieldTypeProblem](fullName)
    )

  // Exclude a single class
  def excludeClass(className: String) = Seq(
      excludePackage(className),
      ProblemFilters.exclude[MissingClassProblem](className),
      ProblemFilters.exclude[MissingTypesProblem](className)
    )

  // Exclude a Spark class, i.e. a class in the org.apache.spark package
  def excludeSparkClass(className: String) = {
    excludeClass("org.apache.spark." + className)
  }

  // Exclude a Spark package, i.e. a package under org.apache.spark
  def excludeSparkPackage(packageName: String) = {
    excludePackage("org.apache.spark." + packageName)
  }

  def ignoredABIProblems(base: File, currentSparkVersion: String) = {

    // Excludes placed here will be used for all Spark versions
    val defaultExcludes = Seq()

    // Read package-private excludes from file
    val classExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-class-excludes")
    val memberExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-member-excludes")

    val ignoredClasses: Seq[String] =
      if (!classExcludeFilePath.exists()) {
        Seq()
      } else {
        IO.read(classExcludeFilePath).split("\n")
      }

    val ignoredMembers: Seq[String] =
      if (!memberExcludeFilePath.exists()) {
        Seq()
      } else {
        IO.read(memberExcludeFilePath).split("\n")
      }

    defaultExcludes ++ ignoredClasses.flatMap(excludeClass) ++
    ignoredMembers.flatMap(excludeMember) ++ MimaExcludes.excludes(currentSparkVersion)
  }

  def mimaSettings(sparkHome: File, projectRef: ProjectRef) = {
    val organization = "org.apache.spark"
    val previousSparkVersion = "2.0.0"
    val project = projectRef.project
    val fullId = "spark-" + project + "_2.11"
    mimaDefaultSettings ++
    Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),
      binaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
  }

} 
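
In a Spark-style build these settings are mixed into each subproject. A minimal sketch of the wiring, assuming the checkout root serves as sparkHome and a core subproject (both names are illustrative):

lazy val core = (project in file("core"))
  .settings(MimaBuild.mimaSettings(file("."), ProjectRef(file("."), "core")): _*)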
Example 2
Source File: SbtTasks.scala    From crm-seed   with Apache License 2.0
import sbt.Keys.{baseDirectory, sourceManaged, target, version}
import sbt.{File, Process, Project, TaskKey}

object AllTasks extends GenerateGitVersionTask

trait GenerateGitVersionTask {
  import sbt._

  def versionCodeGenerator = {
    (sourceManaged in Compile, version) map { (d, v) =>
      val file = d / "BuildInfo.scala"
      IO.write(file,
        """package controllers
          |object BuildInfo {
          |  val version = "%s"
          |}
          | """.stripMargin.format(v))
      Seq(file)
    }
  }
} 
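
Example 2 uses the sbt 0.13 tuple-map syntax, (keyA, keyB) map { ... }, which no longer compiles on sbt 1.x. A sketch of the equivalent sbt 1.x generator, assuming the same controllers package:

Compile / sourceGenerators += Def.task {
  val file = (Compile / sourceManaged).value / "BuildInfo.scala"
  IO.write(file,
    s"""package controllers
       |object BuildInfo {
       |  val version = "${version.value}"
       |}
       |""".stripMargin)
  Seq(file)
}.taskValue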
Example 3
Source File: VersionSourcePlugin.scala    From matcher   with MIT License
import java.nio.charset.StandardCharsets

import sbt.Keys.{sourceGenerators, sourceManaged, version}
import sbt.{AutoPlugin, Def, IO, _}

object VersionSourcePlugin extends AutoPlugin {

  object V {
    val scalaPackage = SettingKey[String]("version-scala-package", "Scala package name where Version object is created")
    val subProject   = SettingKey[String]("version-sub-project", "Sub project name where Version object is created")
  }

  override def trigger: PluginTrigger = PluginTrigger.NoTrigger

  override def projectSettings: Seq[Def.Setting[_]] = {

    (Compile / sourceGenerators) += Def.task {

      val versionFile      = (Compile / sourceManaged).value / s"${V.scalaPackage.value.replace('.', '/')}/Version.scala"
      val versionExtractor = """(\d+)\.(\d+)\.(\d+).*""".r

      val (major, minor, patch) = version.value match {
        case versionExtractor(ma, mi, pa) => (ma.toInt, mi.toInt, pa.toInt)
        case x                            =>
          // A shallow clone contains only the latest commit, so "version" cannot tell which tag is nearest
          if (Option(System.getenv("TRAVIS")).exists(_.toBoolean)) (0, 0, 0)
          else throw new IllegalStateException(s"${V.subProject.value}: can't parse version by git tag: $x")
      }

      IO.write(
        versionFile,
        s"""package ${V.scalaPackage.value}
           |
           |object Version {
           |  val VersionString = "${version.value}"
           |  val VersionTuple = ($major, $minor, $patch)
           |}
           |""".stripMargin,
        charset = StandardCharsets.UTF_8
      )

      Seq(versionFile)
    }
  }
} 
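
Because the plugin declares PluginTrigger.NoTrigger, a build must enable it explicitly and supply both settings. An illustrative sketch (the project name and package are assumptions):

lazy val node = (project in file("node"))
  .enablePlugins(VersionSourcePlugin)
  .settings(
    VersionSourcePlugin.V.scalaPackage := "com.example.node",
    VersionSourcePlugin.V.subProject   := "node"
  )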
Example 4
Source File: MimaBuild.scala    From XSQL   with Apache License 2.0
import sbt._
import sbt.Keys.version

import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.core.MissingClassProblem
import com.typesafe.tools.mima.core.MissingTypesProblem
import com.typesafe.tools.mima.core.ProblemFilters._
import com.typesafe.tools.mima.plugin.MimaKeys.{mimaBinaryIssueFilters, mimaPreviousArtifacts}
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings


object MimaBuild {

  def excludeMember(fullName: String) = Seq(
      ProblemFilters.exclude[MissingMethodProblem](fullName),
      // Sometimes excluded methods have default arguments and
      // they are translated into public methods/fields($default$) in generated
      // bytecode. It is not possible to exhaustively list everything.
      // But this should be okay.
      ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$2"),
      ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$1"),
      ProblemFilters.exclude[MissingFieldProblem](fullName),
      ProblemFilters.exclude[IncompatibleResultTypeProblem](fullName),
      ProblemFilters.exclude[IncompatibleMethTypeProblem](fullName),
      ProblemFilters.exclude[IncompatibleFieldTypeProblem](fullName)
    )

  // Exclude a single class
  def excludeClass(className: String) = Seq(
      ProblemFilters.exclude[Problem](className + ".*"),
      ProblemFilters.exclude[MissingClassProblem](className),
      ProblemFilters.exclude[MissingTypesProblem](className)
    )

  // Exclude a Spark class, i.e. a class in the org.apache.spark package
  def excludeSparkClass(className: String) = {
    excludeClass("org.apache.spark." + className)
  }

  // Exclude a Spark package, i.e. a package under org.apache.spark
  def excludeSparkPackage(packageName: String) = {
    ProblemFilters.exclude[Problem]("org.apache.spark." + packageName + ".*")
  }

  def ignoredABIProblems(base: File, currentSparkVersion: String) = {

    // Excludes placed here will be used for all Spark versions
    val defaultExcludes = Seq()

    // Read package-private excludes from file
    val classExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-class-excludes")
    val memberExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-member-excludes")

    val ignoredClasses: Seq[String] =
      if (!classExcludeFilePath.exists()) {
        Seq()
      } else {
        IO.read(classExcludeFilePath).split("\n")
      }

    val ignoredMembers: Seq[String] =
      if (!memberExcludeFilePath.exists()) {
        Seq()
      } else {
        IO.read(memberExcludeFilePath).split("\n")
      }

    defaultExcludes ++ ignoredClasses.flatMap(excludeClass) ++
    ignoredMembers.flatMap(excludeMember) ++ MimaExcludes.excludes(currentSparkVersion)
  }

  def mimaSettings(sparkHome: File, projectRef: ProjectRef) = {
    val organization = "org.apache.spark"
    val previousSparkVersion = "2.3.0"
    val project = projectRef.project
    val fullId = "spark-" + project + "_2.11"
    mimaDefaultSettings ++
    Seq(mimaPreviousArtifacts := Set(organization % fullId % previousSparkVersion),
      mimaBinaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
  }

} 
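
This variant of MimaBuild targets newer sbt-mima-plugin releases, where previousArtifact (an Option[ModuleID]) was renamed to mimaPreviousArtifacts (a Set[ModuleID]) and binaryIssueFilters to mimaBinaryIssueFilters; package-wide exclusions are likewise written directly as ProblemFilters.exclude[Problem]("package.*") rather than via excludePackage. It is otherwise the same as Example 1.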
Example 5
Source File: TagsOnly.scala    From sbt-release-tags-only   with Apache License 2.0
package sbtrelease.tagsonly

import sbt.Keys.{name, version}
import sbt.{AutoPlugin, Project, Setting, State, settingKey}
import sbtrelease.ReleasePlugin.autoImport.ReleaseKeys.versions
import sbtrelease.ReleasePlugin.autoImport.{releaseVersionBump, _}
import sbtrelease.ReleaseStateTransformations.reapply
import sbtrelease.{ReleasePlugin, Vcs, Version}

object TagsOnlyPlugin extends AutoPlugin {

  override def requires = ReleasePlugin

  override def trigger = allRequirements

  object autoImport {
    val releaseTagPrefix = settingKey[String]("Prefix to use for tags")

    lazy val TagsOnly = sbtrelease.tagsonly.TagsOnly
  }

  import autoImport._

  override lazy val projectSettings = Seq[Setting[_]](
    // Defaults for this plugin
    releaseTagPrefix := s"${name.value}",

    // Provide new defaults for some settings of the main `sbtrelease` plugin
    releaseUseGlobalVersion := false,
    releaseVersionBump := Version.Bump.Minor,
    releaseTagName := s"${releaseTagPrefix.value}-${version.value}",
    releaseTagComment := s"Releasing version ${version.value} of module: ${name.value}"
  )
}

object TagsOnly {
  def setVersionFromTags(tagPrefix: String): ReleaseStep = { st: State =>
    val bumpVersion = Function.chain(Seq(
      Project.extract(st).runTask(releaseNextVersion, st)._2,
      Project.extract(st).runTask(releaseVersion, st)._2))

    val git = getGit(st)
    val gitDescribeCmd = git.cmd("describe", "--match", s"$tagPrefix-*")
    val gitDescription = gitDescribeCmd.! match {
      case 0 => {
        st.log.info("Found existing tag matching the module name")
        git.cmd("describe", "--match", s"$tagPrefix-*").!!.trim
      }
      case 128 => {
        st.log.info("No existing tags matching the module name were found")
        s"$tagPrefix-0.0.0-auto-generated-initial-tag"
      }
      case _ => {
        throw new RuntimeException(s"Unexpected failure running $gitDescribeCmd")
      }
    }
    st.log.info("Most recent tag matching the module was '%s'" format gitDescription)

    // Dots are escaped so the version segment matches a literal "." only
    val versionRegex = s"$tagPrefix-([0-9]+\\.[0-9]+\\.[0-9]+)-?(.*)?".r
    val versionToRelease = gitDescription match {
      case versionRegex(v, "") => v                   // No changes since last release
      case versionRegex(v, _) => s"${bumpVersion(v)}" // new version, bumped according to our chosen strategy
      case _ => sys.error(s"Tag '$gitDescription' failed to match expected format '$versionRegex'")
    }
    st.log.info("Setting release version to '%s'." format versionToRelease)

    st.put(versions, (versionToRelease, "unused 'next version' field"))
    reapply(Seq(
      version := versionToRelease
    ), st)
  }

  // The standard `pushChanges` release step relies on an upstream being configured.  This doesn't.
  lazy val pushTagsOnly: ReleaseStep = { st: State =>
    val gitPushTags = getGit(st).cmd("push", "origin", "--tags").!!.trim
    st.log.info("'git push origin --tags' returned '%s'" format gitPushTags)
    st
  }

  def getGit(st: State): Vcs = {
    Project.extract(st).get(releaseVcs).getOrElse(sys.error("Aborting release. Working directory is not a Git repository."))
  }
} 
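
The steps above are used by splicing them into sbt-release's releaseProcess. A sketch of one plausible pipeline (the selection of standard steps is illustrative, not prescribed by the plugin):

import sbtrelease.ReleaseStateTransformations._

releaseProcess := Seq[ReleaseStep](
  TagsOnly.setVersionFromTags(releaseTagPrefix.value),
  runClean,
  runTest,
  tagRelease,
  TagsOnly.pushTagsOnly
)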
Example 6
Source File: LagomPublish.scala    From lagom   with Apache License 2.0
import sbt.Keys.name
import sbt.Keys.publishTo
import sbt.Keys.version
import sbt.Def
import sbt.FeedbackProvidedException
import sbt.Resolver
import sbt.Task
import sbt.taskKey

object LagomPublish {
  val validatePublishSettings = taskKey[Unit]("Validate Lagom settings to publish released artifacts.")

  val validatePublishSettingsTask: Def.Initialize[Task[Unit]] = Def.task {
    val resolverValue: Option[Resolver] = publishTo.value
    val inReleaseVersion: Boolean       = !version.value.contains("SNAPSHOT")

    // the following implements the rules described in https://github.com/lagom/lagom/issues/1496#issuecomment-408398508
    // TODO: improve rules and validations depending on the version (SNAPSHOT vs release)
    (name.value, resolverValue) match {
      case (_, None)               => throw new PublishValidationFailed("`publishTo` not set.")
      case ("lagom-sbt-plugin", x) =>
        // see https://github.com/sbt/sbt-bintray/blob/7c93bacaae3ffc128564ceacb6e73ec4486525dd/src/main/scala/Bintray.scala#L16-L29 for
        // details on the syntax of Bintray Resolver names.
        if (inReleaseVersion && x.get.name != "Bintray-Sbt-Publish-lagom-sbt-plugin-releases-lagom-sbt-plugin") {
          throw new PublishValidationFailed(
            "Raw(Bintray-Sbt-Publish-lagom-sbt-plugin-releases-lagom-sbt-plugin)",
            x.get
          )
        }
      // TODO: Add a validation for "lagom-sbt-plugin" when the version is a snapshot.
      case (_, x) =>
        // TODO: this could be improved to assert the specific Resolver depending on release-vs-snapshot nature of the version.
        // e.g. sonatype-staging vs sonatype-snapshots
        if (!x.get.name.toLowerCase.contains("sonatype")) {
          throw new PublishValidationFailed("Sonatype", x.get)
        }
    }
  }

  val validatePublishSettingsSetting = validatePublishSettings := validatePublishSettingsTask.value

  private class PublishValidationFailed(message: String) extends RuntimeException with FeedbackProvidedException {
    def this(expectedResolver: String, actual: Resolver) =
      this(s"""Invalid resolver. Expected: "$expectedResolver" but was "$actual".""")
    override def toString = message
  }
} 
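
The validation is activated by adding validatePublishSettingsSetting to each published project; a minimal sketch (the project name is illustrative):

lazy val api = (project in file("api"))
  .settings(LagomPublish.validatePublishSettingsSetting)

// then, before publishing, run: api/validatePublishSettings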
Example 7
Source File: MimaBuild.scala    From iolap   with Apache License 2.0
import sbt._
import sbt.Keys.version

import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.core.MissingClassProblem
import com.typesafe.tools.mima.core.MissingTypesProblem
import com.typesafe.tools.mima.core.ProblemFilters._
import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings


object MimaBuild {

  def excludeMember(fullName: String) = Seq(
      ProblemFilters.exclude[MissingMethodProblem](fullName),
      // Sometimes excluded methods have default arguments and
      // they are translated into public methods/fields($default$) in generated
      // bytecode. It is not possible to exhaustively list everything.
      // But this should be okay.
      ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$2"),
      ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$1"),
      ProblemFilters.exclude[MissingFieldProblem](fullName),
      ProblemFilters.exclude[IncompatibleResultTypeProblem](fullName),
      ProblemFilters.exclude[IncompatibleMethTypeProblem](fullName),
      ProblemFilters.exclude[IncompatibleFieldTypeProblem](fullName)
    )

  // Exclude a single class and its corresponding object
  def excludeClass(className: String) = Seq(
      excludePackage(className),
      ProblemFilters.exclude[MissingClassProblem](className),
      ProblemFilters.exclude[MissingTypesProblem](className),
      excludePackage(className + "$"),
      ProblemFilters.exclude[MissingClassProblem](className + "$"),
      ProblemFilters.exclude[MissingTypesProblem](className + "$")
    )

  // Exclude a Spark class, i.e. a class in the org.apache.spark package
  def excludeSparkClass(className: String) = {
    excludeClass("org.apache.spark." + className)
  }

  // Exclude a Spark package, i.e. a package under org.apache.spark
  def excludeSparkPackage(packageName: String) = {
    excludePackage("org.apache.spark." + packageName)
  }

  def ignoredABIProblems(base: File, currentSparkVersion: String) = {

    // Excludes placed here will be used for all Spark versions
    val defaultExcludes = Seq()

    // Read package-private excludes from file
    val classExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-class-excludes")
    val memberExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-member-excludes")

    val ignoredClasses: Seq[String] =
      if (!classExcludeFilePath.exists()) {
        Seq()
      } else {
        IO.read(classExcludeFilePath).split("\n")
      }

    val ignoredMembers: Seq[String] =
      if (!memberExcludeFilePath.exists()) {
        Seq()
      } else {
        IO.read(memberExcludeFilePath).split("\n")
      }

    defaultExcludes ++ ignoredClasses.flatMap(excludeClass) ++
    ignoredMembers.flatMap(excludeMember) ++ MimaExcludes.excludes(currentSparkVersion)
  }

  def mimaSettings(sparkHome: File, projectRef: ProjectRef) = {
    val organization = "org.apache.spark"
    val previousSparkVersion = "1.3.0"
    val fullId = "spark-" + projectRef.project + "_2.10"
    mimaDefaultSettings ++
    Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),
      binaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
  }

} 
Example 8
Source File: LatestVersion.scala    From crjdt   with Apache License 2.0
import sbt.Keys.version
import sbt._
import sbtrelease.ReleasePlugin.autoImport.ReleaseStep
import sbtrelease.ReleaseStateTransformations.reapply

object LatestVersion extends AutoPlugin {
  object autoImport {
    lazy val latestVersion: SettingKey[String] =
      settingKey[String]("latest released version")

    lazy val setLatestVersion: ReleaseStep = { st: State =>
      val extracted = Project.extract(st)
      val newVersion = extracted.get(version)

      val latestVersionSbt = "latestVersion.sbt"
      val content = Seq(s"""latestVersion in ThisBuild := "$newVersion"""")

      IO.writeLines(file(latestVersionSbt), content)
      val vcs = sbtrelease.Vcs.detect(file("."))
      vcs.foreach(_.add(latestVersionSbt) !! st.log)

      reapply(Seq(latestVersion in ThisBuild := newVersion), st)
    }
  }
}
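
setLatestVersion is an ordinary ReleaseStep, so it can be slotted into sbt-release's releaseProcess after the tag is cut; a sketch under that assumption (the surrounding steps are the stock sbt-release ones):

import sbtrelease.ReleaseStateTransformations._

releaseProcess := Seq[ReleaseStep](
  inquireVersions,
  setReleaseVersion,
  tagRelease,
  setLatestVersion, // brought in via LatestVersion.autoImport
  setNextVersion,
  commitNextVersion
)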