=pro #16371 enable cross builds
* changed the zeromq-scala-binding library
* modified the build-release task to include distributions from cross builds
* removed the tar-zip step from the release script, because the tgz package is not used on the website
* the release script now releases for all crossScalaVersions
* dry run is now the default
parent: 338f61886e
commit: 932f8a9bd7
7 changed files with 39 additions and 40 deletions
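The changes below hinge on sbt's cross building: once crossScalaVersions is declared, any command prefixed with "+" runs once per listed Scala version, which is how the release script ends up producing one distribution per version via "sbt +build-release". A minimal build.sbt-style sketch of that mechanism (illustration only, not the actual Akka build):

    // Minimal sketch of sbt cross building (illustration, not the Akka build itself):
    // with crossScalaVersions declared, "sbt +package" -- or "+build-release" in this
    // commit's release script -- runs the command once per listed Scala version.
    lazy val example = project.settings(
      organization := "com.example",
      scalaVersion := "2.10.4",
      crossScalaVersions := Seq("2.10.4", "2.11.4")
    )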
@@ -11,6 +11,6 @@ Unidoc.javadocSettings
 
 OSGi.zeroMQ
 
-libraryDependencies ++= Dependencies.zeroMQ
+Dependencies.zeroMQ
 
 MimaKeys.previousArtifact := akkaPreviousArtifact("akka-zeromq").value
@@ -34,7 +34,8 @@ object AkkaBuild extends Build {
   lazy val buildSettings = Seq(
     organization := "com.typesafe.akka",
     version := "2.4-SNAPSHOT",
-    scalaVersion := Dependencies.Versions.scalaVersion
+    scalaVersion := Dependencies.Versions.scalaVersion,
+    crossScalaVersions := Dependencies.Versions.crossScala
   )
 
   lazy val root = Project(
@@ -51,7 +52,7 @@ object AkkaBuild extends Build {
     S3.progress in S3.upload := true,
     mappings in S3.upload <<= (Release.releaseDirectory, version) map { (d, v) =>
       val downloads = d / "downloads"
-      val archivesPathFinder = (downloads * ("*" + v + ".zip")) +++ (downloads * ("*" + v + ".tgz"))
+      val archivesPathFinder = downloads * s"*$v.zip"
      archivesPathFinder.get.map(file => (file -> ("akka/" + file.getName)))
     },
 
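With the tgz gone there is no single archive to upload; every versioned zip in the downloads directory is mapped under the "akka/" prefix on S3. A standalone sketch of that mapping (the file names are made-up examples of cross-built archives, not taken from the build):

    // Standalone sketch of the S3 upload mapping above; the file names are
    // hypothetical examples, not values produced by the build.
    import java.io.File

    object UploadMappingSketch extends App {
      val v = "2.4-SNAPSHOT"
      val downloads = Seq(new File(s"akka_2.10-$v.zip"), new File(s"akka_2.11-$v.zip"))
      // mirrors: downloads * s"*$v.zip", then the "akka/" prefix mapping
      val mappings = downloads
        .filter(_.getName.endsWith(v + ".zip"))
        .map(file => file -> ("akka/" + file.getName))
      mappings.foreach { case (file, key) => println(s"$file -> $key") }
    }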
@@ -5,12 +5,12 @@ import sbt._
 object Dependencies {
 
   import DependencyHelpers._
-  import DependencyHelpers.ScalaVersionDependentModuleID.post210Dependency
+  import DependencyHelpers.ScalaVersionDependentModuleID._
 
   object Versions {
-    val scalaVersion = sys.props.get("akka.scalaVersion").getOrElse("2.10.4")
+    val crossScala = Seq("2.10.4", "2.11.4")
+    val scalaVersion = crossScala.head
     val scalaStmVersion = sys.props.get("akka.build.scalaStmVersion").getOrElse("0.7")
-    val scalaZeroMQVersion = sys.props.get("akka.build.scalaZeroMQVersion").getOrElse("0.0.7")
     val scalaTestVersion = sys.props.get("akka.build.scalaTestVersion").getOrElse("2.1.3")
     val scalaCheckVersion = sys.props.get("akka.build.scalaCheckVersion").getOrElse("1.11.3")
   }
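The default Scala version is now simply the head of the cross-build list, so the old -Dakka.scalaVersion override disappears along with the dedicated scalaZeroMQVersion property. The new wiring in isolation:

    // The first entry of crossScala is the default scalaVersion used when not
    // cross building; "+"-prefixed commands iterate over the whole list.
    object VersionsSketch {
      val crossScala = Seq("2.10.4", "2.11.4")
      val scalaVersion = crossScala.head // "2.10.4"
    }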
@@ -28,7 +28,7 @@ object Dependencies {
     val scalaStm = "org.scala-stm" %% "scala-stm" % scalaStmVersion // Modified BSD (Scala)
 
     val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.5" // MIT
-    val zeroMQClient = "org.zeromq" %% "zeromq-scala-binding" % scalaZeroMQVersion // ApacheV2
+    val zeroMQClient = "org.spark-project.zeromq" %% "zeromq-scala-binding" % "0.0.7-spark" // ApacheV2
     val uncommonsMath = "org.uncommons.maths" % "uncommons-maths" % "1.2.2a" exclude("jfree", "jcommon") exclude("jfree", "jfreechart") // ApacheV2
     val osgiCore = "org.osgi" % "org.osgi.core" % "4.3.1" // ApacheV2
     val osgiCompendium= "org.osgi" % "org.osgi.compendium" % "4.3.1" // ApacheV2
@@ -91,7 +91,7 @@ object Dependencies {
 
     val docs = Seq(Test.scalatest, Test.junit, Test.junitIntf)
 
-    val zeroMQ = Seq(protobuf, zeroMQClient, Test.scalatest, Test.junit)
+    val zeroMQ = deps(protobuf, zeroMQClient, Test.scalatest, Test.junit)
 
     val contrib = Seq(Test.junitIntf, Test.commonsIo)
   }
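Switching zeroMQ from a plain Seq to deps(...) is what lets the akka-zeromq build settings above drop the explicit "libraryDependencies ++=" and list Dependencies.zeroMQ directly: deps evidently returns a ready-made setting rather than a module list, and presumably also handles the ScalaVersionDependentModuleID entries referenced by the new wildcard import. The DependencyHelpers code itself is not part of this diff; one plausible shape of such a helper (an assumption, not the real implementation) is:

    // Hypothetical sketch of a deps(...) helper; the real DependencyHelpers
    // source is not shown in this commit. It wraps the modules in a
    // libraryDependencies setting so callers can use the result directly
    // inside a settings sequence.
    import sbt._
    import sbt.Keys._

    object DependencyHelpersSketch {
      def deps(modules: ModuleID*): Setting[Seq[ModuleID]] =
        libraryDependencies ++= modules
    }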
@@ -6,8 +6,6 @@ import sbt.Keys._
 
 object OSGi {
 
-  val Seq(scalaEpoch, scalaMajor) = """(\d+)\.(\d+)\..*""".r.unapplySeq(Dependencies.Versions.scalaVersion).get.map(_.toInt)
-
   // The included osgiSettings that creates bundles also publish the jar files
   // in the .../bundles directory which makes testing locally published artifacts
   // a pain. Create bundles but publish them to the normal .../jars directory.
@@ -19,7 +17,7 @@ object OSGi {
     OsgiKeys.exportPackage := Seq("akka*"),
     OsgiKeys.privatePackage := Seq("akka.osgi.impl"),
     //akka-actor packages are not imported, as contained in the CP
-    OsgiKeys.importPackage := (osgiOptionalImports map optionalResolution) ++ Seq("!sun.misc", scalaImport(), configImport(), "*"),
+    OsgiKeys.importPackage := (osgiOptionalImports map optionalResolution) ++ Seq("!sun.misc", scalaVersion(scalaImport).value, configImport(), "*"),
     // dynamicImportPackage needed for loading classes defined in configuration
     OsgiKeys.dynamicImportPackage := Seq("*")
   )
@@ -50,14 +48,19 @@ object OSGi {
     "com.google.protobuf")
 
   def exports(packages: Seq[String] = Seq(), imports: Seq[String] = Nil) = osgiSettings ++ Seq(
-    OsgiKeys.importPackage := imports ++ defaultImports,
+    OsgiKeys.importPackage := imports ++ scalaVersion(defaultImports).value,
     OsgiKeys.exportPackage := packages
   )
-  def defaultImports = Seq("!sun.misc", akkaImport(), configImport(), scalaImport(), "*")
+  def defaultImports(scalaVersion: String) = Seq("!sun.misc", akkaImport(), configImport(), scalaImport(scalaVersion), "*")
   def akkaImport(packageName: String = "akka.*") = versionedImport(packageName, "2.4", "2.5")
   def configImport(packageName: String = "com.typesafe.config.*") = versionedImport(packageName, "1.2.0", "1.3.0")
   def protobufImport(packageName: String = "com.google.protobuf.*") = versionedImport(packageName, "2.5.0", "2.6.0")
-  def scalaImport(packageName: String = "scala.*") = versionedImport(packageName, s"$scalaEpoch.$scalaMajor", s"$scalaEpoch.${scalaMajor+1}")
+  def scalaImport(version: String) = {
+    val packageName = "scala.*"
+    val ScalaVersion = """(\d+)\.(\d+)\..*""".r
+    val ScalaVersion(epoch, major) = version
+    versionedImport(packageName, s"$epoch.$major", s"$epoch.${major+1}")
+  }
   def optionalResolution(packageName: String) = "%s;resolution:=optional".format(packageName)
   def versionedImport(packageName: String, lower: String, upper: String) = s"""$packageName;version="[$lower,$upper)""""
 }
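scalaImport now derives the OSGi range from the Scala version of the build in progress (via scalaVersion(...)...value) instead of a single epoch/major pair fixed at load time, so each cross build gets its own scala.* import range. A standalone check of the intended arithmetic; note the explicit .toInt in this sketch, since with plain string concatenation the upper bound would come out as "2.101" rather than "2.11":

    // Standalone check of the range computation used above.
    object ScalaImportSketch extends App {
      def versionedImport(packageName: String, lower: String, upper: String) =
        s"""$packageName;version="[$lower,$upper)""""

      def scalaImport(version: String) = {
        val ScalaVersion = """(\d+)\.(\d+)\..*""".r
        val ScalaVersion(epoch, major) = version
        // .toInt is deliberate: "10" + 1 would otherwise concatenate to "101"
        versionedImport("scala.*", s"$epoch.$major", s"$epoch.${major.toInt + 1}")
      }

      println(scalaImport("2.10.4")) // scala.*;version="[2.10,2.11)"
      println(scalaImport("2.11.4")) // scala.*;version="[2.11,2.12)"
    }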
@@ -22,13 +22,14 @@ object Release {
   def buildReleaseCommand = Command.command("build-release") { state =>
     val extracted = Project.extract(state)
     val release = extracted.get(releaseDirectory)
+    val dist = extracted.get(Dist.distDirectory)
     val releaseVersion = extracted.get(version)
     val projectRef = extracted.get(thisProjectRef)
     val repo = extracted.get(Publish.defaultPublishTo)
     val state1 = extracted.runAggregated(publishSigned in projectRef, state)
     val (state2, Seq(api, japi)) = extracted.runTask(unidoc in Compile, state1)
     val (state3, docs) = extracted.runTask(generate in Sphinx, state2)
-    val (state4, dist) = extracted.runTask(Dist.dist, state3)
+    val (state4, _) = extracted.runTask(Dist.dist, state3)
     val (state5, activatorDist) = extracted.runTask(ActivatorDist.activatorDist in LocalProject(AkkaBuild.samples.id), state4)
 
     IO.delete(release)
@@ -37,9 +38,15 @@ object Release {
     IO.copyDirectory(api, release / "api" / "akka" / releaseVersion)
     IO.copyDirectory(japi, release / "japi" / "akka" / releaseVersion)
     IO.copyDirectory(docs, release / "docs" / "akka" / releaseVersion)
-    IO.copyFile(dist, release / "downloads" / dist.name)
+
+    // copy all distributions from dist dir to downloads dir
+    // may contain distributions from cross-builds
+    for (f <- (dist * "akka_*.zip").get)
+      IO.copyFile(f, release / "downloads" / f.name)
 
     for (f <- (activatorDist * "*.zip").get)
       IO.copyFile(f, release / "downloads" / f.name)
 
     state5
   }
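build-release now picks up every cross-built archive from the dist directory instead of copying a single file returned by the Dist task. A rough standalone equivalent of the copy step (the directory locations are assumptions for illustration, not the actual values of Dist.distDirectory and releaseDirectory):

    // Rough standalone equivalent of the copy loop above; paths are assumed.
    import java.io.File
    import java.nio.file.{Files, StandardCopyOption}

    object CopyDistributionsSketch extends App {
      val dist = new File("target/dist")                  // assumed dist directory
      val downloads = new File("target/release/downloads")
      downloads.mkdirs()
      val zips = Option(dist.listFiles).getOrElse(Array.empty[File])
        .filter(f => f.getName.startsWith("akka_") && f.getName.endsWith(".zip"))
      for (f <- zips)
        Files.copy(f.toPath, new File(downloads, f.getName).toPath,
          StandardCopyOption.REPLACE_EXISTING)
    }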
@@ -75,9 +75,9 @@
 #
 # Run the script in two stages.
 # First a dry run:
-# shell> project/scripts/release --dry-run <version>
-# And if all goes well a real run:
 # shell> project/scripts/release <version>
+# And if all goes well a real run:
+# shell> project/scripts/release --real-run <version>
 #
 # The artifacts published to oss.sonatype.org needs to be released by following the
 # instructions under release here
@@ -91,11 +91,13 @@ declare -r default_path="www"
 declare -r release_dir="target/release"
 declare release_server=${default_server}
 declare release_path=${default_path}
-declare -r unzipped_dir="target/dist/unzipped"
 
 # flags
 unset run_tests dry_run no_mima no_revert
 
+# dry-run is the default
+dry_run=true
+
 # get the source location for this script; handles symlinks
 function get_script_path {
   local source="${BASH_SOURCE[0]}"
@@ -113,12 +115,13 @@ declare -r script_dir="$(cd -P "$(dirname "${script_path}")" && pwd)"
 # print usage info
 function usage {
   cat <<EOM
+Dry run is the default.
 Usage: ${script_name} [options] VERSION
   -h | --help           Print this usage message
   -t | --run-tests      Run all tests before releasing
   -s | --server SERVER  Set the release server (default ${default_server})
   -p | --path PATH      Set the path on the release server (default ${default_path})
-  -n | --dry-run        Build everything but do not push the release
+  -e | --real-run       Build everything and push the release
   -m | --no-mima        Skip binary compatibility check in dry-run
   -r | --no-revert      On dry-run don't revert git commits and tags
 EOM
@@ -152,7 +155,7 @@ while true; do
     -t | --run-tests ) run_tests=true; shift ;;
     -s | --server ) release_server=$2; shift 2 ;;
     -p | --path ) release_path=$2; shift 2 ;;
-    -n | --dry-run) dry_run=true; shift ;;
+    -e | --real-run) dry_run=false; shift ;;
     -m | --no-mima) no_mima=true; shift ;;
     -r | --no-revert) no_revert=true; shift ;;
     * ) break ;;
@@ -197,15 +200,6 @@ function get_current_version {
   echo ${result%$code0}
 }
 
-# get the scalaBinaryVersion from sbt
-# a little messy as the ansi escape codes are included
-function get_scala_binary_version {
-  local result=$(sbt scalaBinaryVersion | tail -1 | cut -f2)
-  # remove ansi escape code from end
-  local code0=$(echo -e "\033[0m")
-  echo ${result%$code0}
-}
-
 # store the current git branch for cleaning up
 declare -r initial_branch=$(get_current_branch)
@@ -273,10 +267,6 @@ else
   echolog "Publishing to ${publish_path}"
 fi
 
-echolog "Getting scalaBinaryVersion from sbt..."
-declare -r scala_binary_version=$(get_scala_binary_version)
-echolog "scalaBinaryVersion is ${scala_binary_version}"
-
 [[ $run_tests ]] && echolog "All tests will be run"
 
 # try ssh'ing to the release server
@@ -310,9 +300,7 @@ if [ ! $dry_run ]; then
 else
   RELEASE_OPT="-Dakka.genjavadoc.enabled=true"
 fi
-try sbt $RELEASE_OPT build-release
-echolog "Creating gzipped tar download..."
-try tar -cz -C ${unzipped_dir} -f ${release_dir}/downloads/akka_${scala_binary_version}-${version}.tgz akka-${version}
+try sbt $RELEASE_OPT +build-release
 echolog "Successfully created local release"
 
 # check binary compatibility for dry run