=pro #15031 move build logic into separate files
commit ee5ea5e13f (parent 26a7b029da)
9 changed files with 522 additions and 523 deletions
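The split works because sbt compiles every .scala file under project/ into the build definition, so an object moved into its own file stays on the build classpath and AkkaBuild.scala can keep referencing it by name. A minimal sketch of the pattern, abridged from this commit's own files:

  // project/Dependencies.scala -- a plain top-level object, no extra wiring needed
  package akka

  import sbt._

  object Dependencies {
    object Versions {
      val scalaVersion = sys.props.get("akka.scalaVersion").getOrElse("2.10.4")
    }
  }

  // project/AkkaBuild.scala can then simply write:
  //   scalaVersion := Dependencies.Versions.scalaVersion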
project/AkkaBuild.scala (modified)

@@ -6,28 +6,23 @@ package akka
 import sbt._
 import sbt.Keys._
-import com.typesafe.sbt.SbtMultiJvm
-import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions, multiNodeExecuteTests, multiNodeJavaName, multiNodeHostsFileName, multiNodeTargetDirName, multiTestOptions }
-import com.typesafe.sbt.SbtScalariform
+import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.{ MultiJvm, extraOptions, scalatestOptions }
 import com.typesafe.sbt.SbtScalariform.ScalariformKeys
-import com.typesafe.sbt.osgi.SbtOsgi.{ OsgiKeys, defaultOsgiSettings }
 import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
 import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
 import com.typesafe.tools.mima.plugin.MimaKeys.reportBinaryIssues
 import com.typesafe.tools.mima.plugin.MimaKeys.binaryIssueFilters
 import com.typesafe.sbt.SbtSite.site
-import com.typesafe.sbt.site.SphinxSupport
-import com.typesafe.sbt.site.SphinxSupport.{ enableOutput, generatePdf, generatedPdf, generateEpub, generatedEpub, sphinxInputs, sphinxPackages, Sphinx }
-import com.typesafe.sbt.preprocess.Preprocess.{ preprocess, preprocessExts, preprocessVars, simplePreprocess }
-import java.lang.Boolean.getBoolean
-import java.io.{PrintWriter, InputStreamReader, FileInputStream, File}
-import java.nio.charset.Charset
+import java.io.{InputStreamReader, FileInputStream, File}
 import java.util.Properties
-import annotation.tailrec
-import sbtunidoc.Plugin.{ ScalaUnidoc, JavaUnidoc, scalaJavaUnidocSettings, genjavadocSettings, scalaUnidocSettings }
-import sbtunidoc.Plugin.UnidocKeys.{ unidoc, unidocProjectFilter }
+import sbtunidoc.Plugin.UnidocKeys.unidoc
+import TestExtras.{ JUnitFileReporting, StatsDMetrics, GraphiteBuildEvents }
 import com.typesafe.sbt.S3Plugin.{ S3, s3Settings }
-import akka.TestExtras.{ JUnitFileReporting, StatsDMetrics, GraphiteBuildEvents }
+import Unidoc.{ scaladocSettings, scaladocSettingsNoVerificationOfDiagrams, unidocSettings, javadocSettings }
+import Formatting.{ formatSettings, docFormatSettings }
+import MultiNode.{ multiJvmSettings, defaultMultiJvmOptions, defaultMultiJvmScalatestOptions }
+import com.typesafe.sbt.site.SphinxSupport
+import com.typesafe.sbt.site.SphinxSupport.Sphinx

 object AkkaBuild extends Build {

   System.setProperty("akka.mode", "test") // Is there better place for this?

@@ -37,36 +32,22 @@ object AkkaBuild extends Build {

   val enableMiMa = false

-  val requestedScalaVersion = System.getProperty("akka.scalaVersion", "2.10.4")
-  val Seq(scalaEpoch, scalaMajor) = """(\d+)\.(\d+)\..*""".r.unapplySeq(requestedScalaVersion).get.map(_.toInt)

   lazy val buildSettings = Seq(
     organization := "com.typesafe.akka",
     version := "2.4-SNAPSHOT",
-    scalaVersion := requestedScalaVersion,
+    scalaVersion := Dependencies.Versions.scalaVersion,
     scalaBinaryVersion := System.getProperty("akka.scalaBinaryVersion", if (scalaVersion.value contains "-") scalaVersion.value else scalaBinaryVersion.value)
   )

   lazy val akka = Project(
     id = "akka",
     base = file("."),
-    settings = parentSettings ++
-      Release.settings ++ unidocSettings ++ Publish.versionSettings ++
-      GraphiteBuildEvents.settings ++
-      SphinxSupport.settings ++ Dist.settings ++ s3Settings ++ mimaSettings ++ scaladocSettings ++
-      Protobuf.settings ++ inTask(unidoc)(Seq(
-        unidocProjectFilter in ScalaUnidoc := docProjectFilter,
-        unidocProjectFilter in JavaUnidoc := docProjectFilter,
-        apiMappings in ScalaUnidoc := (apiMappings in (Compile, doc)).value
-      )) ++ Seq(
+    settings = parentSettings ++ Release.settings ++ unidocSettings ++ Publish.versionSettings ++
+      SphinxDoc.akkaSettings ++ Dist.settings ++ s3Settings ++ mimaSettings ++ scaladocSettings ++
+      GraphiteBuildEvents.settings ++ Protobuf.settings ++ Unidoc.settings(Seq(samples), Seq(remoteTests)) ++ Seq(
       parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", "false").toBoolean,
       Publish.defaultPublishTo in ThisBuild <<= crossTarget / "repository",
       Dist.distExclude := Seq(actorTests.id, docs.id, samples.id, osgi.id),
-      // generate online version of docs
-      sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") },
-      // don't regenerate the pdf, just reuse the akka-docs version
-      generatedPdf in Sphinx <<= generatedPdf in Sphinx in LocalProject(docs.id) map identity,
-      generatedEpub in Sphinx <<= generatedEpub in Sphinx in LocalProject(docs.id) map identity,

       S3.host in S3.upload := "downloads.typesafe.com.s3.amazonaws.com",
       S3.progress in S3.upload := true,

@@ -75,7 +56,7 @@ object AkkaBuild extends Build {
       val archivesPathFinder = (downloads * ("*" + v + ".zip")) +++ (downloads * ("*" + v + ".tgz"))
       archivesPathFinder.get.map(file => (file -> ("akka/" + file.getName)))
     },
-    // add reportBinaryIssues to validatePullRequest on minor version maintenance branch
     validatePullRequest <<= (unidoc in Compile, SphinxSupport.generate in Sphinx in docs) map { (_, _) => }
   ),
   aggregate = Seq(actor, testkit, actorTests, remote, remoteTests, camel, cluster, slf4j, agent,

@@ -91,40 +72,6 @@ object AkkaBuild extends Build {
     persistence, kernel, osgi, contrib, multiNodeTestkit)
   )

-  // this detached pseudo-project is used for running the tests against a different Scala version than the one used for compilation
-  // usage:
-  //   all-tests/test (or test-only)
-  // customizing (on the SBT command line):
-  //   set scalaVersion in allTests := "2.11.0"
-  lazy val multiJvmProjects = Seq(remoteTests, cluster)
-  lazy val allTests = Project(
-    id = "all-tests",
-    base = file("all-tests"),
-    dependencies = (
-      ((akka.aggregate: Seq[ProjectReference]) map (_ % "test->test")) ++
-        (multiJvmProjects map (_ % "multi-jvm->multi-jvm"))
-    ),
-    settings = defaultSettings ++ multiJvmSettings ++ Seq(
-      scalaVersion := requestedScalaVersion,
-      publishArtifact := false,
-      definedTests in Test := Nil
-    ) ++ (
-      (akka.aggregate: Seq[ProjectReference])
-        filterNot {
-          case LocalProject(name) => name contains "slf4j"
-          case _ => false
-        } map {
-          pr => definedTests in Test <++= definedTests in (pr, Test)
-        }
-    ) ++ (
-      multiJvmProjects map {
-        pr => definedTests in MultiJvm <++= definedTests in (pr, MultiJvm)
-      }
-    ) ++ Seq(
-      scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions
-    )
-  ) configs (MultiJvm)

   lazy val actor = Project(
     id = "akka-actor",
     base = file("akka-actor"),

@@ -189,7 +136,7 @@ object AkkaBuild extends Build {
       extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src =>
        (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq
      },
-      scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions,
+      scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions.value,
       publishArtifact in Compile := false,
       reportBinaryIssues := () // disable bin comp check
     )

@@ -206,7 +153,7 @@ object AkkaBuild extends Build {
       extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src =>
        (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq
      },
-      scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions,
+      scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions.value,
       previousArtifact := akkaPreviousArtifact("akka-cluster")
     )
   ) configs (MultiJvm)

@@ -487,24 +434,8 @@ object AkkaBuild extends Build {
     dependencies = Seq(actor, testkit % "test->test",
       remote % "compile;test->test", cluster, slf4j, agent, zeroMQ, camel, osgi,
       persistence % "compile;test->test"),
-    settings = defaultSettings ++ docFormatSettings ++ site.settings ++ site.sphinxSupport() ++ site.publishSite ++ sphinxPreprocessing ++ Seq(
-      sourceDirectory in Sphinx <<= baseDirectory / "rst",
-      sphinxPackages in Sphinx <+= baseDirectory { _ / "_sphinx" / "pygments" },
-      // copy akka-contrib/docs into our rst_preprocess/contrib (and apply substitutions)
-      preprocess in Sphinx <<= (preprocess in Sphinx,
-        baseDirectory in contrib,
-        target in preprocess in Sphinx,
-        cacheDirectory,
-        preprocessExts in Sphinx,
-        preprocessVars in Sphinx,
-        streams) map { (orig, src, target, cacheDir, exts, vars, s) =>
-        val contribSrc = Map("contribSrc" -> "../../../akka-contrib")
-        simplePreprocess(src / "docs", target / "contrib", cacheDir / "sphinx" / "preprocessed-contrib", exts, vars ++ contribSrc, s.log)
-        orig
-      },
-      enableOutput in generatePdf in Sphinx := true,
-      enableOutput in generateEpub in Sphinx := true,
-      unmanagedSourceDirectories in Test <<= sourceDirectory in Sphinx apply { _ ** "code" get },
+    settings = defaultSettings ++ docFormatSettings ++ site.settings ++ site.sphinxSupport() ++ site.publishSite ++
+      SphinxDoc.docsSettings ++ SphinxDoc.sphinxPreprocessing ++ Seq(
       libraryDependencies ++= Dependencies.docs,
       publishArtifact in Compile := false,
       unmanagedSourceDirectories in ScalariformKeys.format in Test <<= unmanagedSourceDirectories in Test,

@@ -534,13 +465,6 @@ object AkkaBuild extends Build {
     )
   ) configs (MultiJvm)

-  // // this issue will be fixed in M8, for now we need to exclude M6, M7 modules used to compile the compiler
-  def excludeOldModules(m: ModuleID) = List("M6", "M7").foldLeft(m) { (mID, mStone) =>
-    val version = s"2.11.0-$mStone"
-    mID.exclude("org.scala-lang.modules", s"scala-parser-combinators_$version").exclude("org.scala-lang.modules", s"scala-xml_$version")
-  }

   // Settings

   override lazy val settings =

@@ -577,61 +501,6 @@ object AkkaBuild extends Build {
       |""".stripMargin
   )

-  val excludeTestNames = SettingKey[Seq[String]]("exclude-test-names")
-  val excludeTestTags = SettingKey[Set[String]]("exclude-test-tags")
-  val onlyTestTags = SettingKey[Set[String]]("only-test-tags")
-
-  lazy val defaultMultiJvmOptions: Seq[String] = {
-    import scala.collection.JavaConverters._
-    // multinode.D= and multinode.X= makes it possible to pass arbitrary
-    // -D or -X arguments to the forked jvm, e.g.
-    // -Dmultinode.Djava.net.preferIPv4Stack=true -Dmultinode.Xmx512m -Dmultinode.XX:MaxPermSize=256M
-    // -DMultiJvm.akka.cluster.Stress.nrOfNodes=15
-    val MultinodeJvmArgs = "multinode\\.(D|X)(.*)".r
-    val knownPrefix = Set("multnode.", "akka.", "MultiJvm.")
-    val akkaProperties = System.getProperties.propertyNames.asScala.toList.collect {
-      case MultinodeJvmArgs(a, b) =>
-        val value = System.getProperty("multinode." + a + b)
-        "-" + a + b + (if (value == "") "" else "=" + value)
-      case key: String if knownPrefix.exists(pre => key.startsWith(pre)) => "-D" + key + "=" + System.getProperty(key)
-    }
-
-    "-Xmx256m" :: akkaProperties :::
-      (if (getBoolean("sbt.log.noformat")) List("-Dakka.test.nocolor=true") else Nil)
-  }
-
-  // for excluding tests by name use system property: -Dakka.test.names.exclude=TimingSpec
-  // not supported by multi-jvm tests
-  lazy val useExcludeTestNames: Seq[String] = systemPropertyAsSeq("akka.test.names.exclude")
-
-  // for excluding tests by tag use system property: -Dakka.test.tags.exclude=<tag name>
-  // note that it will not be used if you specify -Dakka.test.tags.only
-  lazy val useExcludeTestTags: Set[String] = {
-    if (useOnlyTestTags.isEmpty) systemPropertyAsSeq("akka.test.tags.exclude").toSet
-    else Set.empty
-  }
-
-  // for running only tests by tag use system property: -Dakka.test.tags.only=<tag name>
-  lazy val useOnlyTestTags: Set[String] = systemPropertyAsSeq("akka.test.tags.only").toSet
-
-  def executeMultiJvmTests: Boolean = {
-    useOnlyTestTags.contains("long-running") || !useExcludeTestTags.contains("long-running")
-  }
-
-  def systemPropertyAsSeq(name: String): Seq[String] = {
-    val prop = System.getProperty(name, "")
-    if (prop.isEmpty) Seq.empty else prop.split(",").toSeq
-  }
-
-  val multiNodeEnabled = java.lang.Boolean.getBoolean("akka.test.multi-node")
-
-  lazy val defaultMultiJvmScalatestOptions: Seq[String] = {
-    val excludeTags = useExcludeTestTags.toSeq
-    Seq("-C", "org.scalatest.akka.QuietReporter") ++
-      (if (excludeTags.isEmpty) Seq.empty else Seq("-l", if (multiNodeEnabled) excludeTags.mkString("\"", " ", "\"") else excludeTags.mkString(" "))) ++
-      (if (useOnlyTestTags.isEmpty) Seq.empty else Seq("-n", if (multiNodeEnabled) useOnlyTestTags.mkString("\"", " ", "\"") else useOnlyTestTags.mkString(" ")))
-  }

   val (mavenLocalResolver, mavenLocalResolverSettings) =
     System.getProperty("akka.build.M2Dir") match {
       case null => (Resolver.mavenLocal, Seq.empty)

@@ -658,7 +527,7 @@ object AkkaBuild extends Build {
     pomIncludeRepository := (_ => false) // do not leak internal repositories during staging
   )

-  lazy val defaultSettings = baseSettings ++ mimaSettings ++ resolverSettings ++
+  lazy val defaultSettings = baseSettings ++ mimaSettings ++ resolverSettings ++ TestExtras.Filter.settings ++
     Protobuf.settings ++ Seq(
       // compile options
       scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.6", "-deprecation", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Xlint"),

@@ -697,23 +566,6 @@ object AkkaBuild extends Build {
       parallelExecution in Test := System.getProperty("akka.parallelExecution", "false").toBoolean,
       logBuffered in Test := System.getProperty("akka.logBufferedTests", "false").toBoolean,
-
-      excludeTestNames := useExcludeTestNames,
-      excludeTestTags := useExcludeTestTags,
-      onlyTestTags := useOnlyTestTags,
-
-      // add filters for tests excluded by name
-      testOptions in Test <++= excludeTestNames map { _.map(exclude => Tests.Filter(test => !test.contains(exclude))) },
-
-      // add arguments for tests excluded by tag
-      testOptions in Test <++= excludeTestTags map { tags =>
-        if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-l", tags.mkString(" ")))
-      },
-
-      // add arguments for running only tests by tag
-      testOptions in Test <++= onlyTestTags map { tags =>
-        if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" ")))
-      },

       // show full stack traces and test case durations
       testOptions in Test += Tests.Argument("-oDF"),

@@ -732,170 +584,6 @@ object AkkaBuild extends Build {
   // the tasks that to run for validation is defined in defaultSettings
   val validatePullRequestTask = validatePullRequest := ()

-  def githubUrl(v: String): String = {
-    val branch = if (v.endsWith("SNAPSHOT")) "master" else "v" + v
-    "http://github.com/akka/akka/tree/" + branch
-  }
-
-  // pre-processing settings for sphinx
-  lazy val sphinxPreprocessing = inConfig(Sphinx)(Seq(
-    target in preprocess <<= baseDirectory / "rst_preprocessed",
-    preprocessExts := Set("rst", "py"),
-    // customization of sphinx @<key>@ replacements, add to all sphinx-using projects
-    // add additional replacements here
-    preprocessVars <<= (scalaVersion, version) { (s, v) =>
-      val BinVer = """(\d+\.\d+)\.\d+""".r
-      Map(
-        "version" -> v,
-        "scalaVersion" -> s,
-        "crossString" -> (s match {
-          case BinVer(_) => ""
-          case _ => "cross CrossVersion.full"
-        }),
-        "jarName" -> (s match {
-          case BinVer(bv) => "akka-actor_" + bv + "-" + v + ".jar"
-          case _ => "akka-actor_" + s + "-" + v + ".jar"
-        }),
-        "binVersion" -> (s match {
-          case BinVer(bv) => bv
-          case _ => s
-        }),
-        "sigarVersion" -> Dependencies.Compile.sigar.revision,
-        "github" -> githubUrl(v)
-      )
-    },
-    preprocess <<= (sourceDirectory, target in preprocess, cacheDirectory, preprocessExts, preprocessVars, streams) map {
-      (src, target, cacheDir, exts, vars, s) => simplePreprocess(src, target, cacheDir / "sphinx" / "preprocessed", exts, vars, s.log)
-    },
-    sphinxInputs <<= (sphinxInputs, preprocess) map { (inputs, preprocessed) => inputs.copy(src = preprocessed) }
-  )) ++ Seq(
-    cleanFiles <+= target in preprocess in Sphinx
-  )
-
-  lazy val formatSettings = SbtScalariform.scalariformSettings ++ Seq(
-    ScalariformKeys.preferences in Compile := formattingPreferences,
-    ScalariformKeys.preferences in Test := formattingPreferences
-  )
-
-  lazy val docFormatSettings = SbtScalariform.scalariformSettings ++ Seq(
-    ScalariformKeys.preferences in Compile := docFormattingPreferences,
-    ScalariformKeys.preferences in Test := docFormattingPreferences,
-    ScalariformKeys.preferences in MultiJvm := docFormattingPreferences
-  )
-
-  def formattingPreferences = {
-    import scalariform.formatter.preferences._
-    FormattingPreferences()
-      .setPreference(RewriteArrowSymbols, true)
-      .setPreference(AlignParameters, true)
-      .setPreference(AlignSingleLineCaseStatements, true)
-  }
-
-  def docFormattingPreferences = {
-    import scalariform.formatter.preferences._
-    FormattingPreferences()
-      .setPreference(RewriteArrowSymbols, false)
-      .setPreference(AlignParameters, true)
-      .setPreference(AlignSingleLineCaseStatements, true)
-  }
-
-  lazy val multiJvmSettings = SbtMultiJvm.multiJvmSettings ++ inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++ Seq(
-    jvmOptions in MultiJvm := defaultMultiJvmOptions,
-    compileInputs in (MultiJvm, compile) <<= (compileInputs in (MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm),
-    compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test),
-    ScalariformKeys.preferences in MultiJvm := formattingPreferences) ++
-    Option(System.getProperty("akka.test.multi-node.hostsFileName")).map(x => Seq(multiNodeHostsFileName in MultiJvm := x)).getOrElse(Seq.empty) ++
-    Option(System.getProperty("akka.test.multi-node.java")).map(x => Seq(multiNodeJavaName in MultiJvm := x)).getOrElse(Seq.empty) ++
-    Option(System.getProperty("akka.test.multi-node.targetDirName")).map(x => Seq(multiNodeTargetDirName in MultiJvm := x)).getOrElse(Seq.empty) ++
-    ((executeMultiJvmTests, multiNodeEnabled) match {
-      case (true, true) =>
-        executeTests in Test <<= (executeTests in Test, multiNodeExecuteTests in MultiJvm) map {
-          case (testResults, multiNodeResults) =>
-            val overall =
-              if (testResults.overall.id < multiNodeResults.overall.id)
-                multiNodeResults.overall
-              else
-                testResults.overall
-            Tests.Output(overall,
-              testResults.events ++ multiNodeResults.events,
-              testResults.summaries ++ multiNodeResults.summaries)
-        }
-      case (true, false) =>
-        executeTests in Test <<= (executeTests in Test, executeTests in MultiJvm) map {
-          case (testResults, multiNodeResults) =>
-            val overall =
-              if (testResults.overall.id < multiNodeResults.overall.id)
-                multiNodeResults.overall
-              else
-                testResults.overall
-            Tests.Output(overall,
-              testResults.events ++ multiNodeResults.events,
-              testResults.summaries ++ multiNodeResults.summaries)
-        }
-      case (false, _) => Seq.empty
-    })
-
-  val genjavadocEnabled = System.getProperty("akka.genjavadoc.enabled", "false").toBoolean
-  val (unidocSettings, javadocSettings) =
-    if (genjavadocEnabled) (scalaJavaUnidocSettings, genjavadocSettings)
-    else (scalaUnidocSettings, Nil)
-
-  val docProjectFilter = inAnyProject --
-    inAggregates(samples, transitive = true, includeRoot = true) --
-    inProjects(remoteTests)
-
-  lazy val scaladocDiagramsEnabled = System.getProperty("akka.scaladoc.diagrams", "true").toBoolean
-  lazy val scaladocAutoAPI = System.getProperty("akka.scaladoc.autoapi", "true").toBoolean
-
-  def scaladocOptions(ver: String, base: File): List[String] = {
-    val urlString = githubUrl(ver) + "/€{FILE_PATH}.scala"
-    val opts = List("-implicits", "-doc-source-url", urlString, "-sourcepath", base.getAbsolutePath)
-    if (scaladocDiagramsEnabled) "-diagrams"::opts else opts
-  }
-
-  lazy val scaladocSettings: Seq[sbt.Setting[_]] = {
-    scaladocSettingsNoVerificationOfDiagrams ++
-      (if (scaladocDiagramsEnabled) Seq(doc in Compile ~= scaladocVerifier) else Seq.empty)
-  }
-
-  // for projects with few (one) classes there might not be any diagrams
-  lazy val scaladocSettingsNoVerificationOfDiagrams: Seq[sbt.Setting[_]] = {
-    inTask(doc)(Seq(
-      scalacOptions in Compile <++= (version, baseDirectory in akka) map scaladocOptions,
-      autoAPIMappings := scaladocAutoAPI
-    ))
-  }
-
-  def scaladocVerifier(file: File): File= {
-    @tailrec
-    def findHTMLFileWithDiagram(dirs: Seq[File]): Boolean = {
-      if (dirs.isEmpty) false
-      else {
-        val curr = dirs.head
-        val (newDirs, files) = curr.listFiles.partition(_.isDirectory)
-        val rest = dirs.tail ++ newDirs
-        val hasDiagram = files exists { f =>
-          val name = f.getName
-          if (name.endsWith(".html") && !name.startsWith("index-") &&
-            !(name.compare("index.html") == 0) && !(name.compare("package.html") == 0)) {
-            val source = scala.io.Source.fromFile(f)("utf-8")
-            val hd = source.getLines().exists(_.contains("<div class=\"toggleContainer block diagram-container\" id=\"inheritance-diagram-container\">"))
-            source.close()
-            hd
-          }
-          else false
-        }
-        hasDiagram || findHTMLFileWithDiagram(rest)
-      }
-    }
-    // if we have generated scaladoc and none of the files have a diagram then fail
-    if (file.exists() && !findHTMLFileWithDiagram(List(file)))
-      sys.error("ScalaDoc diagrams not generated!")
-    else
-      file
-  }

   lazy val mimaIgnoredProblems = {
     import com.typesafe.tools.mima.core._
     Seq(

@@ -930,195 +618,4 @@ object AkkaBuild extends Build {
     }
   }

-  // OSGi settings
-  object OSGi {
-    // The included osgiSettings that creates bundles also publish the jar files
-    // in the .../bundles directory which makes testing locally published artifacts
-    // a pain. Create bundles but publish them to the normal .../jars directory.
-    def osgiSettings = defaultOsgiSettings ++ Seq(
-      packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), OsgiKeys.bundle).identityMap
-    )
-
-    val actor = osgiSettings ++ Seq(
-      OsgiKeys.exportPackage := Seq("akka*"),
-      OsgiKeys.privatePackage := Seq("akka.osgi.impl"),
-      //akka-actor packages are not imported, as contained in the CP
-      OsgiKeys.importPackage := (osgiOptionalImports map optionalResolution) ++ Seq("!sun.misc", scalaImport(), configImport(), "*"),
-      // dynamicImportPackage needed for loading classes defined in configuration
-      OsgiKeys.dynamicImportPackage := Seq("*")
-    )
-
-    val agent = exports(Seq("akka.agent.*"))
-    val camel = exports(Seq("akka.camel.*"))
-    val cluster = exports(Seq("akka.cluster.*"), imports = Seq(protobufImport()))
-    val osgi = exports(Seq("akka.osgi.*"))
-    val osgiDiningHakkersSampleApi = exports(Seq("akka.sample.osgi.api"))
-    val osgiDiningHakkersSampleCommand = osgiSettings ++ Seq(OsgiKeys.bundleActivator := Option("akka.sample.osgi.command.Activator"), OsgiKeys.privatePackage := Seq("akka.sample.osgi.command"))
-    val osgiDiningHakkersSampleCore = exports(Seq("")) ++ Seq(OsgiKeys.bundleActivator := Option("akka.sample.osgi.activation.Activator"), OsgiKeys.privatePackage := Seq("akka.sample.osgi.internal", "akka.sample.osgi.activation", "akka.sample.osgi.service"))
-    val osgiDiningHakkersSampleUncommons = exports(Seq("org.uncommons.maths.random")) ++ Seq(OsgiKeys.privatePackage := Seq("org.uncommons.maths.binary", "org.uncommons.maths", "org.uncommons.maths.number"))
-    val remote = exports(Seq("akka.remote.*"), imports = Seq(protobufImport()))
-    val slf4j = exports(Seq("akka.event.slf4j.*"))
-    val persistence = exports(Seq("akka.persistence.*"), imports = Seq(protobufImport()))
-    val testkit = exports(Seq("akka.testkit.*"))
-    val zeroMQ = exports(Seq("akka.zeromq.*"), imports = Seq(protobufImport()))
-
-    val osgiOptionalImports = Seq(
-      // needed because testkit is normally not used in the application bundle,
-      // but it should still be included as transitive dependency and used by BundleDelegatingClassLoader
-      // to be able to find refererence.conf
-      "akka.testkit",
-      "com.google.protobuf")
-
-    def exports(packages: Seq[String] = Seq(), imports: Seq[String] = Nil) = osgiSettings ++ Seq(
-      OsgiKeys.importPackage := imports ++ defaultImports,
-      OsgiKeys.exportPackage := packages
-    )
-    def defaultImports = Seq("!sun.misc", akkaImport(), configImport(), scalaImport(), "*")
-    def akkaImport(packageName: String = "akka.*") = versionedImport(packageName, "2.4", "2.5")
-    def configImport(packageName: String = "com.typesafe.config.*") = versionedImport(packageName, "1.2.0", "1.3.0")
-    def protobufImport(packageName: String = "com.google.protobuf.*") = versionedImport(packageName, "2.5.0", "2.6.0")
-    def scalaImport(packageName: String = "scala.*") = versionedImport(packageName, s"$scalaEpoch.$scalaMajor", s"$scalaEpoch.${scalaMajor+1}")
-    def optionalResolution(packageName: String) = "%s;resolution:=optional".format(packageName)
-    def versionedImport(packageName: String, lower: String, upper: String) = s"""$packageName;version="[$lower,$upper)""""
-  }
-}
-
-// Dependencies
-object Dependencies {
-  object Versions {
-    val scalaStmVersion = System.getProperty("akka.build.scalaStmVersion", "0.7")
-    val scalaZeroMQVersion = System.getProperty("akka.build.scalaZeroMQVersion", "0.0.7")
-    val genJavaDocVersion = System.getProperty("akka.build.genJavaDocVersion", "0.7")
-    val scalaTestVersion = System.getProperty("akka.build.scalaTestVersion", "2.1.3")
-    val scalaCheckVersion = System.getProperty("akka.build.scalaCheckVersion", "1.11.3")
-  }
-
-  object Compile {
-    import Versions._
-
-    // Several dependencies are mirrored in the OSGi Dining Hackers maven project
-    // They need to be changed in this file as well:
-    //   akka-samples/akka-sample-osgi-dining-hakkers/pom.xml
-
-    // Compile
-    val camelCore = "org.apache.camel" % "camel-core" % "2.13.0" exclude("org.slf4j", "slf4j-api") // ApacheV2
-
-    val config = "com.typesafe" % "config" % "1.2.0" // ApacheV2
-    // mirrored in OSGi sample
-    val netty = "io.netty" % "netty" % "3.8.0.Final" // ApacheV2
-    // mirrored in OSGi sample
-    val protobuf = "com.google.protobuf" % "protobuf-java" % "2.5.0" // New BSD
-    val scalaStm = "org.scala-stm" %% "scala-stm" % scalaStmVersion // Modified BSD (Scala)
-
-    val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.5" // MIT
-    val zeroMQClient = "org.zeromq" %% "zeromq-scala-binding" % scalaZeroMQVersion // ApacheV2
-    // mirrored in OSGi sample
-    val uncommonsMath = "org.uncommons.maths" % "uncommons-maths" % "1.2.2a" exclude("jfree", "jcommon") exclude("jfree", "jfreechart") // ApacheV2
-    // mirrored in OSGi sample
-    val osgiCore = "org.osgi" % "org.osgi.core" % "4.3.1" // ApacheV2
-    val osgiCompendium = "org.osgi" % "org.osgi.compendium" % "4.3.1" // ApacheV2
-    // mirrored in OSGi sample
-    val levelDB = "org.iq80.leveldb" % "leveldb" % "0.7" // ApacheV2
-    // mirrored in OSGi sample
-    val levelDBNative = "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.7" // New BSD
-
-    // Camel Sample
-    val camelJetty = "org.apache.camel" % "camel-jetty" % camelCore.revision // ApacheV2
-
-    // Cluster Sample
-    val sigar = "org.fusesource" % "sigar" % "1.6.4" // ApacheV2
-
-    // Compiler plugins
-    val genjavadoc = compilerPlugin("com.typesafe.genjavadoc" %% "genjavadoc-plugin" % genJavaDocVersion cross CrossVersion.full) // ApacheV2
-
-    // Test
-    object Test {
-      val commonsMath = "org.apache.commons" % "commons-math" % "2.1" % "test" // ApacheV2
-      val commonsIo = "commons-io" % "commons-io" % "2.4" % "test" // ApacheV2
-      val commonsCodec = "commons-codec" % "commons-codec" % "1.7" % "test" // ApacheV2
-      val junit = "junit" % "junit" % "4.10" % "test" // Common Public License 1.0
-      val logback = "ch.qos.logback" % "logback-classic" % "1.0.13" % "test" // EPL 1.0 / LGPL 2.1
-      val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" // MIT
-      // changing the scalatest dependency must be reflected in akka-docs/rst/dev/multi-jvm-testing.rst
-      // mirrored in OSGi sample
-      val scalatest = "org.scalatest" %% "scalatest" % scalaTestVersion % "test" // ApacheV2
-      val scalacheck = "org.scalacheck" %% "scalacheck" % scalaCheckVersion % "test" // New BSD
-      val pojosr = "com.googlecode.pojosr" % "de.kalpatec.pojosr.framework" % "0.2.1" % "test" // ApacheV2
-      val tinybundles = "org.ops4j.pax.tinybundles" % "tinybundles" % "1.0.0" % "test" // ApacheV2
-      val log4j = "log4j" % "log4j" % "1.2.14" % "test" // ApacheV2
-      val junitIntf = "com.novocode" % "junit-interface" % "0.8" % "test" // MIT
-      // dining hakkers integration test using pax-exam
-      // mirrored in OSGi sample
-      val karafExam = "org.apache.karaf.tooling.exam" % "org.apache.karaf.tooling.exam.container" % "2.3.1" % "test" // ApacheV2
-      // mirrored in OSGi sample
-      val paxExam = "org.ops4j.pax.exam" % "pax-exam-junit4" % "2.6.0" % "test" // ApacheV2
-      val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.0.1" % "test"
-    }
-  }
-
-  import Compile._
-
-  val scalaXmlDepencency = (if (AkkaBuild.requestedScalaVersion.startsWith("2.10")) Nil else Seq(Test.scalaXml))
-
-  val actor = Seq(config)
-  val testkit = Seq(Test.junit, Test.scalatest)
-  val actorTests = Seq(Test.junit, Test.scalatest, Test.commonsCodec, Test.commonsMath, Test.mockito, Test.scalacheck, protobuf, Test.junitIntf)
-  val remote = Seq(netty, protobuf, uncommonsMath, Test.junit, Test.scalatest)
-  val remoteTests = Seq(Test.junit, Test.scalatest) ++ scalaXmlDepencency
-  val cluster = Seq(Test.junit, Test.scalatest)
-  val slf4j = Seq(slf4jApi, Test.logback)
-  val agent = Seq(scalaStm, Test.scalatest, Test.junit)
-  val persistence = Seq(levelDB, levelDBNative, protobuf, Test.scalatest, Test.junit, Test.commonsIo) ++
-    scalaXmlDepencency
-  val kernel = Seq(Test.scalatest, Test.junit)
-  val camel = Seq(camelCore, Test.scalatest, Test.junit, Test.mockito, Test.logback, Test.commonsIo, Test.junitIntf)
-  val camelSample = Seq(camelJetty)
-  val osgi = Seq(osgiCore, osgiCompendium, Test.logback, Test.commonsIo, Test.pojosr, Test.tinybundles, Test.scalatest, Test.junit)
-  val osgiDiningHakkersSampleCore = Seq(config, osgiCore, osgiCompendium)
-  val osgiDiningHakkersSampleCommand = Seq(osgiCore, osgiCompendium)
-  val osgiDiningHakkersSampleTest = Seq(osgiCore, osgiCompendium, Test.karafExam, Test.paxExam, Test.junit, Test.scalatest)
-  val uncommons = Seq(uncommonsMath)
-  val docs = Seq(Test.scalatest, Test.junit, Test.junitIntf)
-  val zeroMQ = Seq(protobuf, zeroMQClient, Test.scalatest, Test.junit)
-  val clusterSample = Seq(Test.scalatest, sigar)
-  val contrib = Seq(Test.junitIntf, Test.commonsIo)
-  val multiNodeSample = Seq(Test.scalatest)
 }
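Besides the moves, note the recurring call-site change in this file: helpers such as defaultMultiJvmScalatestOptions are no longer plain Seq[String] values but sbt settings (see project/MultiNode.scala below), so they are now read with .value inside the := macro. A minimal sketch of that mechanism, using a hypothetical key:

  import sbt._

  object SettingSketch {
    val onlyTestTags = settingKey[Set[String]]("tags to run")  // hypothetical key

    // Def.setting produces a Def.Initialize[Seq[String]]; other settings consume
    // it with `.value`, which is why the diff rewrites
    //   scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions
    // to
    //   scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions.value
    val defaultOptions = Def.setting {
      if (onlyTestTags.value.isEmpty) Seq.empty[String]
      else Seq("-n", onlyTestTags.value.mkString(" "))
    }
  }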
project/Dependencies.scala (new file, +124)

package akka

import sbt._

object Dependencies {

  object Versions {
    val scalaVersion = sys.props.get("akka.scalaVersion").getOrElse("2.10.4")
    val scalaStmVersion = sys.props.get("akka.build.scalaStmVersion").getOrElse("0.7")
    val scalaZeroMQVersion = sys.props.get("akka.build.scalaZeroMQVersion").getOrElse("0.0.7")
    val scalaTestVersion = sys.props.get("akka.build.scalaTestVersion").getOrElse("2.1.3")
    val scalaCheckVersion = sys.props.get("akka.build.scalaCheckVersion").getOrElse("1.11.3")
  }

  object Compile {
    import Versions._

    // Several dependencies are mirrored in the OSGi Dining Hackers maven project
    // They need to be changed in this file as well:
    //   akka-samples/akka-sample-osgi-dining-hakkers/pom.xml

    // Compile
    val camelCore = "org.apache.camel" % "camel-core" % "2.13.0" exclude("org.slf4j", "slf4j-api") // ApacheV2

    val config = "com.typesafe" % "config" % "1.2.0" // ApacheV2
    // mirrored in OSGi sample
    val netty = "io.netty" % "netty" % "3.8.0.Final" // ApacheV2
    // mirrored in OSGi sample
    val protobuf = "com.google.protobuf" % "protobuf-java" % "2.5.0" // New BSD
    val scalaStm = "org.scala-stm" %% "scala-stm" % scalaStmVersion // Modified BSD (Scala)

    val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.5" // MIT
    val zeroMQClient = "org.zeromq" %% "zeromq-scala-binding" % scalaZeroMQVersion // ApacheV2
    // mirrored in OSGi sample
    val uncommonsMath = "org.uncommons.maths" % "uncommons-maths" % "1.2.2a" exclude("jfree", "jcommon") exclude("jfree", "jfreechart") // ApacheV2
    // mirrored in OSGi sample
    val osgiCore = "org.osgi" % "org.osgi.core" % "4.3.1" // ApacheV2
    val osgiCompendium = "org.osgi" % "org.osgi.compendium" % "4.3.1" // ApacheV2
    // mirrored in OSGi sample
    val levelDB = "org.iq80.leveldb" % "leveldb" % "0.7" // ApacheV2
    // mirrored in OSGi sample
    val levelDBNative = "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.7" // New BSD

    // Camel Sample
    val camelJetty = "org.apache.camel" % "camel-jetty" % camelCore.revision // ApacheV2

    // Cluster Sample
    val sigar = "org.fusesource" % "sigar" % "1.6.4" // ApacheV2

    // Test
    object Test {
      val commonsMath = "org.apache.commons" % "commons-math" % "2.1" % "test" // ApacheV2
      val commonsIo = "commons-io" % "commons-io" % "2.4" % "test" // ApacheV2
      val commonsCodec = "commons-codec" % "commons-codec" % "1.7" % "test" // ApacheV2
      val junit = "junit" % "junit" % "4.10" % "test" // Common Public License 1.0
      val logback = "ch.qos.logback" % "logback-classic" % "1.0.13" % "test" // EPL 1.0 / LGPL 2.1
      val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" // MIT
      // changing the scalatest dependency must be reflected in akka-docs/rst/dev/multi-jvm-testing.rst
      // mirrored in OSGi sample
      val scalatest = "org.scalatest" %% "scalatest" % scalaTestVersion % "test" // ApacheV2
      val scalacheck = "org.scalacheck" %% "scalacheck" % scalaCheckVersion % "test" // New BSD
      val pojosr = "com.googlecode.pojosr" % "de.kalpatec.pojosr.framework" % "0.2.1" % "test" // ApacheV2
      val tinybundles = "org.ops4j.pax.tinybundles" % "tinybundles" % "1.0.0" % "test" // ApacheV2
      val log4j = "log4j" % "log4j" % "1.2.14" % "test" // ApacheV2
      val junitIntf = "com.novocode" % "junit-interface" % "0.8" % "test" // MIT
      // dining hakkers integration test using pax-exam
      // mirrored in OSGi sample
      val karafExam = "org.apache.karaf.tooling.exam" % "org.apache.karaf.tooling.exam.container" % "2.3.1" % "test" // ApacheV2
      // mirrored in OSGi sample
      val paxExam = "org.ops4j.pax.exam" % "pax-exam-junit4" % "2.6.0" % "test" // ApacheV2
      val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.0.1" % "test"
    }
  }

  import Compile._

  val scalaXmlDepencency = (if (Versions.scalaVersion.startsWith("2.10")) Nil else Seq(Test.scalaXml))

  val actor = Seq(config)
  val testkit = Seq(Test.junit, Test.scalatest)
  val actorTests = Seq(Test.junit, Test.scalatest, Test.commonsCodec, Test.commonsMath, Test.mockito, Test.scalacheck, protobuf, Test.junitIntf)
  val remote = Seq(netty, protobuf, uncommonsMath, Test.junit, Test.scalatest)
  val remoteTests = Seq(Test.junit, Test.scalatest) ++ scalaXmlDepencency
  val cluster = Seq(Test.junit, Test.scalatest)
  val slf4j = Seq(slf4jApi, Test.logback)
  val agent = Seq(scalaStm, Test.scalatest, Test.junit)
  val persistence = Seq(levelDB, levelDBNative, protobuf, Test.scalatest, Test.junit, Test.commonsIo) ++
    scalaXmlDepencency
  val kernel = Seq(Test.scalatest, Test.junit)
  val camel = Seq(camelCore, Test.scalatest, Test.junit, Test.mockito, Test.logback, Test.commonsIo, Test.junitIntf)
  val camelSample = Seq(camelJetty)
  val osgi = Seq(osgiCore, osgiCompendium, Test.logback, Test.commonsIo, Test.pojosr, Test.tinybundles, Test.scalatest, Test.junit)
  val osgiDiningHakkersSampleCore = Seq(config, osgiCore, osgiCompendium)
  val osgiDiningHakkersSampleCommand = Seq(osgiCore, osgiCompendium)
  val osgiDiningHakkersSampleTest = Seq(osgiCore, osgiCompendium, Test.karafExam, Test.paxExam, Test.junit, Test.scalatest)
  val uncommons = Seq(uncommonsMath)
  val docs = Seq(Test.scalatest, Test.junit, Test.junitIntf)
  val zeroMQ = Seq(protobuf, zeroMQClient, Test.scalatest, Test.junit)
  val clusterSample = Seq(Test.scalatest, sigar)
  val contrib = Seq(Test.junitIntf, Test.commonsIo)
  val multiNodeSample = Seq(Test.scalatest)
}
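Each entry in Versions reads a -D system property first, so any of these can be overridden per invocation without editing the file (e.g. sbt -Dakka.build.scalaTestVersion=2.1.4 test). The grouped lists at the bottom are consumed from the project definitions via libraryDependencies, as the diff above shows for akka-docs; a sketch of the consuming side, with a hypothetical settings value:

  import sbt._
  import sbt.Keys._

  // the pattern used by the projects in AkkaBuild.scala
  // (`libraryDependencies ++= Dependencies.docs` appears in the diff above):
  lazy val testkitSettings: Seq[Setting[_]] = Seq(
    libraryDependencies ++= Dependencies.testkit  // junit and scalatest, both scoped to "test"
  )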
project/Formatting.scala (new file, +35)

package akka

import sbt._
import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.MultiJvm
import com.typesafe.sbt.SbtScalariform
import com.typesafe.sbt.SbtScalariform.ScalariformKeys

object Formatting {
  lazy val formatSettings = SbtScalariform.scalariformSettings ++ Seq(
    ScalariformKeys.preferences in Compile := formattingPreferences,
    ScalariformKeys.preferences in Test := formattingPreferences
  )

  lazy val docFormatSettings = SbtScalariform.scalariformSettings ++ Seq(
    ScalariformKeys.preferences in Compile := docFormattingPreferences,
    ScalariformKeys.preferences in Test := docFormattingPreferences,
    ScalariformKeys.preferences in MultiJvm := docFormattingPreferences
  )

  def formattingPreferences = {
    import scalariform.formatter.preferences._
    FormattingPreferences()
      .setPreference(RewriteArrowSymbols, true)
      .setPreference(AlignParameters, true)
      .setPreference(AlignSingleLineCaseStatements, true)
  }

  def docFormattingPreferences = {
    import scalariform.formatter.preferences._
    FormattingPreferences()
      .setPreference(RewriteArrowSymbols, false)
      .setPreference(AlignParameters, true)
      .setPreference(AlignSingleLineCaseStatements, true)
  }
}
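The two preference sets differ only in RewriteArrowSymbols: main sources get scalariform's unicode rewrite, while docs (and MultiJvm sources under docFormatSettings) keep it off, presumably so code shown in the documentation stays plain ASCII. An illustrative before/after, not taken from the commit:

  val beforeFormatting = (x: Int) => x + 1   // left alone under docFormattingPreferences
  val afterFormatting  = (x: Int) ⇒ x + 1    // what formattingPreferences produces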
project/GitHub.scala (new file, +8)

package akka

object GitHub {
  def url(v: String): String = {
    val branch = if (v.endsWith("SNAPSHOT")) "master" else "v" + v
    "http://github.com/akka/akka/tree/" + branch
  }
}
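GitHub.url picks the tree to link against: snapshot versions point at master, anything else at the matching release tag. Expected values, derived directly from the code above:

  GitHub.url("2.4-SNAPSHOT") // "http://github.com/akka/akka/tree/master"
  GitHub.url("2.3.2")        // "http://github.com/akka/akka/tree/v2.3.2"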
project/MultiNode.scala (new file, +72)

package akka

import akka.TestExtras.Filter.Keys._
import com.typesafe.sbt.{SbtScalariform, SbtMultiJvm}
import sbt._
import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys._
import sbt.Keys._
import com.typesafe.sbt.SbtScalariform.ScalariformKeys

object MultiNode {
  def executeMultiJvmTests = Def.setting {
    onlyTestTags.value.contains("long-running") || !excludeTestTags.value.contains("long-running")
  }

  val multiNodeEnabled = sys.props.get("akka.test.multi-node").getOrElse("false").toBoolean

  lazy val defaultMultiJvmOptions: Seq[String] = {
    import scala.collection.JavaConverters._
    // multinode.D= and multinode.X= makes it possible to pass arbitrary
    // -D or -X arguments to the forked jvm, e.g.
    // -Dmultinode.Djava.net.preferIPv4Stack=true -Dmultinode.Xmx512m -Dmultinode.XX:MaxPermSize=256M
    // -DMultiJvm.akka.cluster.Stress.nrOfNodes=15
    val MultinodeJvmArgs = "multinode\\.(D|X)(.*)".r
    val knownPrefix = Set("multnode.", "akka.", "MultiJvm.")
    val akkaProperties = System.getProperties.propertyNames.asScala.toList.collect {
      case MultinodeJvmArgs(a, b) =>
        val value = System.getProperty("multinode." + a + b)
        "-" + a + b + (if (value == "") "" else "=" + value)
      case key: String if knownPrefix.exists(pre => key.startsWith(pre)) => "-D" + key + "=" + System.getProperty(key)
    }

    "-Xmx256m" :: akkaProperties :::
      (if (sys.props.get("sbt.log.noformat").getOrElse("false").toBoolean) List("-Dakka.test.nocolor=true") else Nil)
  }

  lazy val defaultMultiJvmScalatestOptions = Def.setting {
    Seq("-C", "org.scalatest.akka.QuietReporter") ++
      (if (excludeTestTags.value.isEmpty) Seq.empty else Seq("-l", if (multiNodeEnabled) excludeTestTags.value.mkString("\"", " ", "\"") else excludeTestTags.value.mkString(" "))) ++
      (if (onlyTestTags.value.isEmpty) Seq.empty else Seq("-n", if (multiNodeEnabled) onlyTestTags.value.mkString("\"", " ", "\"") else onlyTestTags.value.mkString(" ")))
  }

  lazy val multiJvmSettings = SbtMultiJvm.multiJvmSettings ++ inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++ Seq(
    jvmOptions in MultiJvm := defaultMultiJvmOptions,
    compileInputs in (MultiJvm, compile) <<= (compileInputs in (MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm),
    compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test),
    ScalariformKeys.preferences in MultiJvm := Formatting.formattingPreferences) ++
    Option(System.getProperty("akka.test.multi-node.hostsFileName")).map(x => Seq(multiNodeHostsFileName in MultiJvm := x)).getOrElse(Seq.empty) ++
    Option(System.getProperty("akka.test.multi-node.java")).map(x => Seq(multiNodeJavaName in MultiJvm := x)).getOrElse(Seq.empty) ++
    Option(System.getProperty("akka.test.multi-node.targetDirName")).map(x => Seq(multiNodeTargetDirName in MultiJvm := x)).getOrElse(Seq.empty) ++ {
      Seq(
        executeTests in Test := {
          if (executeMultiJvmTests.value) {
            val testResults = (executeTests in Test).value
            val multiNodeResults = multiNodeEnabled match {
              case true => (multiNodeExecuteTests in MultiJvm).value
              case false => (executeTests in MultiJvm).value
            }

            val overall =
              if (testResults.overall.id < multiNodeResults.overall.id)
                multiNodeResults.overall
              else
                testResults.overall
            Tests.Output(overall,
              testResults.events ++ multiNodeResults.events,
              testResults.summaries ++ multiNodeResults.summaries)
          }
          else (executeTests in Test).value
        }
      )
    }
}
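defaultMultiJvmOptions forwards specially prefixed system properties into the forked JVMs: multinode.D.../multinode.X... keys turn into -D/-X flags, and keys starting with akka., MultiJvm. or the (misspelled, carried over verbatim from the old code) multnode. prefix pass through as -D definitions. A standalone trace of the regex step, under the same pattern:

  val MultinodeJvmArgs = "multinode\\.(D|X)(.*)".r

  def forwarded(key: String, value: String): String = key match {
    case MultinodeJvmArgs(a, b) => "-" + a + b + (if (value == "") "" else "=" + value)
    case other                  => "-D" + other + "=" + value
  }

  forwarded("multinode.Xmx512m", "")                       // "-Xmx512m"
  forwarded("multinode.Djava.net.preferIPv4Stack", "true") // "-Djava.net.preferIPv4Stack=true"

The executeTests override at the bottom merges the ordinary Test results with the MultiJvm (or multi-node) results and keeps the worse overall outcome, so a multi-jvm failure fails the build even when the plain tests pass.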
project/OSGi.scala (new file, +71)

package akka

import com.typesafe.sbt.osgi.SbtOsgi._
import sbt._
import sbt.Keys._

object OSGi {

  val Seq(scalaEpoch, scalaMajor) = """(\d+)\.(\d+)\..*""".r.unapplySeq(Dependencies.Versions.scalaVersion).get.map(_.toInt)

  // The included osgiSettings that creates bundles also publish the jar files
  // in the .../bundles directory which makes testing locally published artifacts
  // a pain. Create bundles but publish them to the normal .../jars directory.
  def osgiSettings = defaultOsgiSettings ++ Seq(
    packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), OsgiKeys.bundle).identityMap
  )

  val actor = osgiSettings ++ Seq(
    OsgiKeys.exportPackage := Seq("akka*"),
    OsgiKeys.privatePackage := Seq("akka.osgi.impl"),
    //akka-actor packages are not imported, as contained in the CP
    OsgiKeys.importPackage := (osgiOptionalImports map optionalResolution) ++ Seq("!sun.misc", scalaImport(), configImport(), "*"),
    // dynamicImportPackage needed for loading classes defined in configuration
    OsgiKeys.dynamicImportPackage := Seq("*")
  )

  val agent = exports(Seq("akka.agent.*"))

  val camel = exports(Seq("akka.camel.*"))

  val cluster = exports(Seq("akka.cluster.*"), imports = Seq(protobufImport()))

  val osgi = exports(Seq("akka.osgi.*"))

  val osgiDiningHakkersSampleApi = exports(Seq("akka.sample.osgi.api"))

  val osgiDiningHakkersSampleCommand = osgiSettings ++ Seq(OsgiKeys.bundleActivator := Option("akka.sample.osgi.command.Activator"), OsgiKeys.privatePackage := Seq("akka.sample.osgi.command"))

  val osgiDiningHakkersSampleCore = exports(Seq("")) ++ Seq(OsgiKeys.bundleActivator := Option("akka.sample.osgi.activation.Activator"), OsgiKeys.privatePackage := Seq("akka.sample.osgi.internal", "akka.sample.osgi.activation", "akka.sample.osgi.service"))

  val osgiDiningHakkersSampleUncommons = exports(Seq("org.uncommons.maths.random")) ++ Seq(OsgiKeys.privatePackage := Seq("org.uncommons.maths.binary", "org.uncommons.maths", "org.uncommons.maths.number"))

  val remote = exports(Seq("akka.remote.*"), imports = Seq(protobufImport()))

  val slf4j = exports(Seq("akka.event.slf4j.*"))

  val persistence = exports(Seq("akka.persistence.*"), imports = Seq(protobufImport()))

  val testkit = exports(Seq("akka.testkit.*"))

  val zeroMQ = exports(Seq("akka.zeromq.*"), imports = Seq(protobufImport()))

  val osgiOptionalImports = Seq(
    // needed because testkit is normally not used in the application bundle,
    // but it should still be included as transitive dependency and used by BundleDelegatingClassLoader
    // to be able to find refererence.conf
    "akka.testkit",
    "com.google.protobuf")

  def exports(packages: Seq[String] = Seq(), imports: Seq[String] = Nil) = osgiSettings ++ Seq(
    OsgiKeys.importPackage := imports ++ defaultImports,
    OsgiKeys.exportPackage := packages
  )
  def defaultImports = Seq("!sun.misc", akkaImport(), configImport(), scalaImport(), "*")
  def akkaImport(packageName: String = "akka.*") = versionedImport(packageName, "2.4", "2.5")
  def configImport(packageName: String = "com.typesafe.config.*") = versionedImport(packageName, "1.2.0", "1.3.0")
  def protobufImport(packageName: String = "com.google.protobuf.*") = versionedImport(packageName, "2.5.0", "2.6.0")
  def scalaImport(packageName: String = "scala.*") = versionedImport(packageName, s"$scalaEpoch.$scalaMajor", s"$scalaEpoch.${scalaMajor+1}")
  def optionalResolution(packageName: String) = "%s;resolution:=optional".format(packageName)
  def versionedImport(packageName: String, lower: String, upper: String) = s"""$packageName;version="[$lower,$upper)""""
}
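All of the import helpers bottom out in versionedImport, which renders an OSGi Import-Package clause with a half-open version range. What the defaults above expand to, with Dependencies.Versions.scalaVersion at 2.10.x:

  configImport()  // com.typesafe.config.*;version="[1.2.0,1.3.0)"
  scalaImport()   // scala.*;version="[2.10,2.11)"
  optionalResolution("akka.testkit") // akka.testkit;resolution:=optional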
74 project/SphinxDoc.scala Normal file
@ -0,0 +1,74 @@
package akka

import sbt._
import com.typesafe.sbt.site.SphinxSupport
import com.typesafe.sbt.site.SphinxSupport.{ enableOutput, generatePdf, generatedPdf, generateEpub, generatedEpub, sphinxInputs, sphinxPackages, Sphinx }
import sbt.Keys._
import com.typesafe.sbt.preprocess.Preprocess._
import sbt.LocalProject

object SphinxDoc {

  def akkaSettings = SphinxSupport.settings ++ Seq(
    // generate online version of docs
    sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(AkkaBuild.docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") },
    // don't regenerate the pdf, just reuse the akka-docs version
    generatedPdf in Sphinx <<= generatedPdf in Sphinx in LocalProject(AkkaBuild.docs.id) map identity,
    generatedEpub in Sphinx <<= generatedEpub in Sphinx in LocalProject(AkkaBuild.docs.id) map identity
  )

  def docsSettings = Seq(
    sourceDirectory in Sphinx <<= baseDirectory / "rst",
    sphinxPackages in Sphinx <+= baseDirectory { _ / "_sphinx" / "pygments" },
    // copy akka-contrib/docs into our rst_preprocess/contrib (and apply substitutions)
    preprocess in Sphinx <<= (preprocess in Sphinx,
      baseDirectory in AkkaBuild.contrib,
      target in preprocess in Sphinx,
      cacheDirectory,
      preprocessExts in Sphinx,
      preprocessVars in Sphinx,
      streams) map { (orig, src, target, cacheDir, exts, vars, s) =>
        val contribSrc = Map("contribSrc" -> "../../../akka-contrib")
        simplePreprocess(src / "docs", target / "contrib", cacheDir / "sphinx" / "preprocessed-contrib", exts, vars ++ contribSrc, s.log)
        orig
      },
    enableOutput in generatePdf in Sphinx := true,
    enableOutput in generateEpub in Sphinx := true,
    unmanagedSourceDirectories in Test <<= sourceDirectory in Sphinx apply { _ ** "code" get }
  )

  // pre-processing settings for sphinx
  lazy val sphinxPreprocessing = inConfig(Sphinx)(Seq(
    target in preprocess <<= baseDirectory / "rst_preprocessed",
    preprocessExts := Set("rst", "py"),
    // customization of sphinx @<key>@ replacements, add to all sphinx-using projects
    // add additional replacements here
    preprocessVars <<= (scalaVersion, version) { (s, v) =>
      val BinVer = """(\d+\.\d+)\.\d+""".r
      Map(
        "version" -> v,
        "scalaVersion" -> s,
        "crossString" -> (s match {
          case BinVer(_) => ""
          case _ => "cross CrossVersion.full"
        }),
        "jarName" -> (s match {
          case BinVer(bv) => "akka-actor_" + bv + "-" + v + ".jar"
          case _ => "akka-actor_" + s + "-" + v + ".jar"
        }),
        "binVersion" -> (s match {
          case BinVer(bv) => bv
          case _ => s
        }),
        "sigarVersion" -> Dependencies.Compile.sigar.revision,
        "github" -> GitHub.url(v)
      )
    },
    preprocess <<= (sourceDirectory, target in preprocess, cacheDirectory, preprocessExts, preprocessVars, streams) map {
      (src, target, cacheDir, exts, vars, s) => simplePreprocess(src, target, cacheDir / "sphinx" / "preprocessed", exts, vars, s.log)
    },
    sphinxInputs <<= (sphinxInputs, preprocess) map { (inputs, preprocessed) => inputs.copy(src = preprocessed) }
  )) ++ Seq(
    cleanFiles <+= target in preprocess in Sphinx
  )
}
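A small runnable sketch (not part of this commit) of how the BinVer regex above derives the @binVersion@ and @jarName@ substitutions; the version strings are made-up examples:

object PreprocessVarsSketch extends App {
  val BinVer = """(\d+\.\d+)\.\d+""".r
  // a stable release like 2.10.4 collapses to its binary version; milestones/RCs are kept whole
  def vars(s: String, v: String) = Map(
    "binVersion" -> (s match { case BinVer(bv) => bv; case _ => s }),
    "jarName" -> (s match {
      case BinVer(bv) => "akka-actor_" + bv + "-" + v + ".jar"
      case _          => "akka-actor_" + s + "-" + v + ".jar"
    }))

  println(vars("2.10.4", "2.3.0"))
  // Map(binVersion -> 2.10, jarName -> akka-actor_2.10-2.3.0.jar)
  println(vars("2.11.0-M3", "2.3.0"))
  // Map(binVersion -> 2.11.0-M3, jarName -> akka-actor_2.11.0-M3-2.3.0.jar)
}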
@ -216,4 +216,43 @@ object TestExtras
    }
  }

  object Filter {

    object Keys {
      val excludeTestNames = settingKey[Set[String]]("Names of tests to be excluded. Not supported by MultiJVM tests. Example usage: -Dakka.test.names.exclude=TimingSpec")
      val excludeTestTags = settingKey[Set[String]]("Tags of tests to be excluded. It will not be used if you specify -Dakka.test.tags.only. Example usage: -Dakka.test.tags.exclude=long-running")
      val onlyTestTags = settingKey[Set[String]]("Tags of tests to be run. Example usage: -Dakka.test.tags.only=long-running")
    }

    import Keys._

    def settings = {
      Seq(
        excludeTestNames := systemPropertyAsSeq("akka.test.names.exclude").toSet,
        excludeTestTags := {
          if (onlyTestTags.value.isEmpty) systemPropertyAsSeq("akka.test.tags.exclude").toSet
          else Set.empty
        },
        onlyTestTags := systemPropertyAsSeq("akka.test.tags.only").toSet,

        // add filters for tests excluded by name
        testOptions in Test <++= excludeTestNames map { _.toSeq.map(exclude => Tests.Filter(test => !test.contains(exclude))) },

        // add arguments for tests excluded by tag
        testOptions in Test <++= excludeTestTags map { tags =>
          if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-l", tags.mkString(" ")))
        },

        // add arguments for running only tests by tag
        testOptions in Test <++= onlyTestTags map { tags =>
          if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" ")))
        }
      )
    }

    def systemPropertyAsSeq(name: String): Seq[String] = {
      val prop = sys.props.get(name).getOrElse("")
      if (prop.isEmpty) Seq.empty else prop.split(",").toSeq
    }
  }

}
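A runnable sketch (not part of this commit) of the comma-separated system-property parsing the Filter settings above rely on:

object TestFilterSketch extends App {
  // same parsing as Filter.systemPropertyAsSeq above
  def systemPropertyAsSeq(name: String): Seq[String] = {
    val prop = sys.props.get(name).getOrElse("")
    if (prop.isEmpty) Seq.empty else prop.split(",").toSeq
  }

  sys.props("akka.test.tags.exclude") = "long-running,performance"
  println(systemPropertyAsSeq("akka.test.tags.exclude").toList) // List(long-running, performance)
  println(systemPropertyAsSeq("akka.test.tags.only").toList)    // List()
}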
79 project/Unidoc.scala Normal file
@ -0,0 +1,79 @@
package akka

import sbt._
import sbtunidoc.Plugin.UnidocKeys._
import sbtunidoc.Plugin.{ ScalaUnidoc, JavaUnidoc, scalaJavaUnidocSettings, genjavadocSettings, scalaUnidocSettings }
import sbt.Keys._
import sbt.File
import scala.annotation.tailrec

object Unidoc {

  def settings(ignoreAggregates: Seq[Project], ignoreProjects: Seq[Project]) = {
    val withoutAggregates = ignoreAggregates.foldLeft(inAnyProject) { _ -- inAggregates(_, transitive = true, includeRoot = true) }
    val docProjectFilter = ignoreProjects.foldLeft(withoutAggregates) { _ -- inProjects(_) }

    inTask(unidoc)(Seq(
      unidocProjectFilter in ScalaUnidoc := docProjectFilter,
      unidocProjectFilter in JavaUnidoc := docProjectFilter,
      apiMappings in ScalaUnidoc := (apiMappings in (Compile, doc)).value
    ))
  }

  val genjavadocEnabled = sys.props.get("akka.genjavadoc.enabled").getOrElse("false").toBoolean
  val (unidocSettings, javadocSettings) =
    if (genjavadocEnabled) (scalaJavaUnidocSettings, genjavadocSettings)
    else (scalaUnidocSettings, Nil)

  lazy val scaladocDiagramsEnabled = sys.props.get("akka.scaladoc.diagrams").getOrElse("true").toBoolean
  lazy val scaladocAutoAPI = sys.props.get("akka.scaladoc.autoapi").getOrElse("true").toBoolean

  def scaladocSettings: Seq[sbt.Setting[_]] = {
    scaladocSettingsNoVerificationOfDiagrams ++
      (if (scaladocDiagramsEnabled) Seq(doc in Compile ~= scaladocVerifier) else Seq.empty)
  }

  // for projects with few (or only one) classes there might not be any diagrams
  def scaladocSettingsNoVerificationOfDiagrams: Seq[sbt.Setting[_]] = {
    inTask(doc)(Seq(
      scalacOptions in Compile <++= (version, baseDirectory in ThisBuild) map scaladocOptions,
      autoAPIMappings := scaladocAutoAPI
    ))
  }

  def scaladocOptions(ver: String, base: File): List[String] = {
    val urlString = GitHub.url(ver) + "/€{FILE_PATH}.scala"
    val opts = List("-implicits", "-doc-source-url", urlString, "-sourcepath", base.getAbsolutePath)
    if (scaladocDiagramsEnabled) "-diagrams" :: opts else opts
  }

  def scaladocVerifier(file: File): File = {
    @tailrec
    def findHTMLFileWithDiagram(dirs: Seq[File]): Boolean = {
      if (dirs.isEmpty) false
      else {
        val curr = dirs.head
        val (newDirs, files) = curr.listFiles.partition(_.isDirectory)
        val rest = dirs.tail ++ newDirs
        val hasDiagram = files exists { f =>
          val name = f.getName
          if (name.endsWith(".html") && !name.startsWith("index-") &&
            !(name.compare("index.html") == 0) && !(name.compare("package.html") == 0)) {
            val source = scala.io.Source.fromFile(f)("utf-8")
            val hd = source.getLines().exists(_.contains("<div class=\"toggleContainer block diagram-container\" id=\"inheritance-diagram-container\">"))
            source.close()
            hd
          }
          else false
        }
        hasDiagram || findHTMLFileWithDiagram(rest)
      }
    }

    // if we have generated scaladoc and none of the files have a diagram then fail
    if (file.exists() && !findHTMLFileWithDiagram(List(file)))
      sys.error("ScalaDoc diagrams not generated!")
    else
      file
  }
}
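A standalone sketch (not part of this commit) of the same breadth-first directory walk that scaladocVerifier uses, generalized to an arbitrary file predicate; the path in the usage line is hypothetical:

import java.io.File
import scala.annotation.tailrec

object DocWalkSketch extends App {
  // breadth-first: check the files of the first directory, then recurse into the rest
  @tailrec
  def anyFile(dirs: List[File], p: File => Boolean): Boolean = dirs match {
    case Nil => false
    case dir :: rest =>
      val children = Option(dir.listFiles).map(_.toList).getOrElse(Nil)
      val (subDirs, files) = children.partition(_.isDirectory)
      files.exists(p) || anyFile(rest ++ subDirs, p)
  }

  // e.g. was any scaladoc HTML generated under a (hypothetical) api directory?
  println(anyFile(List(new File("target/scala-2.10/api")), _.getName.endsWith(".html")))
}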